Docker on Nvidia Jetson Nano

The wrapper script below (adapted from Technica Corporation's Tegra-Docker project) does by hand what nvidia-docker does on x86 hosts: it bind-mounts the host's CUDA and TensorRT directories into the container, passes the Tegra GPU device nodes through, and sets LD_LIBRARY_PATH so binaries inside the container can find the driver libraries.

#!/bin/bash
# Copied and modified from https://github.com/Technica-Corporation/Tegra-Docker
# Copyright (c) 2017, Technica Corporation. All rights reserved.

NV_LIBS="/usr/lib/aarch64-linux-gnu \
	 /usr/local/cuda/lib64 \
	/usr/local/cuda \
	 /usr/src/tensorrt \
         /usr/local/cuda-10.0 \
         /usr/include \
	/usr/src "

LD_PATH="/usr/lib/aarch64-linux-gnu \
         /usr/lib/aarch64-linux-gnu/tegra \
         /usr/local/cuda/lib64 \
	 /usr/src/tensorrt \
	/usr/local/cuda \
	 /usr/local/cuda-10.0 \
	 /usr/include"

# Tegra GPU device nodes that must be passed through to the container
GPU_DEVICES="/dev/nvhost-ctrl \
             /dev/nvhost-ctrl-gpu \
             /dev/nvhost-prof-gpu \
             /dev/nvmap \
             /dev/nvhost-gpu \
             /dev/nvhost-as-gpu"

# Base docker arguments; --net=host gives the container the host's network stack
NV_DOCKER_ARGS="--net=host"

build_docker_args() {
        # Set the required library directories as bind-mounted volumes on the docker container

        LIB_ARGS=""
        for lib in $NV_LIBS; do
                LIB_ARGS="$LIB_ARGS -v $lib:$lib"
        done

        # Set the required GPU device nodes to be passed through to the container
        DEV_ARGS=""
        for dev in $GPU_DEVICES; do
                DEV_ARGS="$DEV_ARGS --device=$dev"
        done

        NV_DOCKER_ARGS="$NV_DOCKER_ARGS $LIB_ARGS $DEV_ARGS"
}

build_env() {
        # Build the LD_LIBRARY_PATH that is exported inside the container
        LD_LIBRARY_PATH=""
        for lib in $LD_PATH; do
                LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$lib"
        done
        # Strip the leading ':' so an empty entry does not add the current
        # directory to the library search path
        LD_LIBRARY_PATH="${LD_LIBRARY_PATH#:}"
}

if [[ $# -ge 2 && $1 == "run" ]]; then
        echo "Running an nvidia docker image"
        build_docker_args
        build_env

        # Drop the leading "run" so "$@" holds exactly the user's docker arguments;
        # NV_DOCKER_ARGS is left unquoted on purpose so it splits into words.
        shift
        echo "docker run -e LD_LIBRARY_PATH=$LD_LIBRARY_PATH $NV_DOCKER_ARGS $*"
        docker run -e "LD_LIBRARY_PATH=$LD_LIBRARY_PATH" $NV_DOCKER_ARGS "$@"
fi
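
For example, if the script above is saved as tegra_docker.sh and made executable (the filename, image, and command below are illustrative, not part of the original project), a container can be started like this:

# Hypothetical invocation of the wrapper: everything after "run" is passed
# straight through to "docker run". The image only needs the application
# itself, since the CUDA/TensorRT user-space files are bind-mounted from
# the host rather than baked into the image.
chmod +x tegra_docker.sh
./tegra_docker.sh run -it --rm arm64v8/ubuntu:18.04 /bin/bash

The wrapper then prints and executes a command of roughly this shape (abridged): docker run -e LD_LIBRARY_PATH=/usr/lib/aarch64-linux-gnu:... --net=host -v /usr/lib/aarch64-linux-gnu:/usr/lib/aarch64-linux-gnu ... --device=/dev/nvhost-ctrl ... arm64v8/ubuntu:18.04 /bin/bash, so the host's CUDA and TensorRT installs appear inside the container at their usual paths.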