# Ollama server (Jetson-targeted dustynv image).
# NOTE: -p 11434:11434 was removed — published ports are discarded when
# --network=host is used (the container shares the host network stack and
# the API is reachable on host port 11434 directly).
sudo docker run --gpus all --rm -it \
  --network=host \
  -v ~/ollama:/ollama \
  -v ~/ollama/logs:/data/logs \
  -e LD_LIBRARY_PATH=/usr/local/cuda/lib64:/usr/lib/aarch64-linux-gnu/ \
  -e OLLAMA_MODELS=/ollama \
  dustynv/ollama:r36.2.0
# Open WebUI front-end, pointed at the Ollama API on localhost:11434.
# --add-host keeps host.docker.internal resolvable even though host
# networking is in use (harmless either way).
# NOTE(review): OLLAMA_API_BASE_URL is the legacy env var name; newer
# Open WebUI releases read OLLAMA_BASE_URL — confirm against the image tag.
docker run -it --rm \
  --network=host \
  --add-host=host.docker.internal:host-gateway \
  -e OLLAMA_API_BASE_URL=http://127.0.0.1:11434 \
  ghcr.io/open-webui/open-webui:main