Welcome! Handel home

2025年3月31日 星期一

Ubuntu 24.04: Docker + GPU driver, serving an LLM with Ollama and Open WebUI

#=== Ubuntu 24.04: install Docker (per https://docs.docker.com/engine/install/ubuntu/) ===

# Refresh the package index and install the prerequisites needed to
# fetch Docker's GPG key and add its apt repository over HTTPS.
# (The original notes repeated update/install twice; once is enough.)
$sudo apt-get update

$sudo apt-get install ca-certificates curl gnupg

# Add Docker's official GPG key:
$sudo install -m 0755 -d /etc/apt/keyrings

$sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc

# Key must be world-readable so apt (running unprivileged) can verify packages.
$sudo chmod a+r /etc/apt/keyrings/docker.asc


# Add the repository to Apt sources.
# NOTE: this is ONE command — the backslash-continued lines must stay
# together; the blank lines in the original broke the continuation when
# copy-pasted into a terminal.
echo \
  "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
  $(. /etc/os-release && echo "${UBUNTU_CODENAME:-$VERSION_CODENAME}") stable" | \
  sudo tee /etc/apt/sources.list.d/docker.list > /dev/null

$sudo apt-get update

# Install Docker Engine, CLI, containerd, and the buildx/compose plugins.
$sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin

#=== test docker is working ===

# Pulls and runs the tiny hello-world image; a success message confirms
# the daemon, image pulls, and container execution all work.
$sudo docker run hello-world


#==== Ubuntu 24.04: install NVIDIA Container Toolkit ======

# Add the NVIDIA Container Toolkit repository with its signing key.
# This is ONE pipeline — keep the backslash-continued lines together
# (the blank lines in the original broke the command on copy-paste).
$curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | sudo gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg \
  && curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list | \
    sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' | \
  sudo tee /etc/apt/sources.list.d/nvidia-container-toolkit.list

# Optional: enable the experimental repository entries.
$sudo sed -i -e '/experimental/ s/^#//g' /etc/apt/sources.list.d/nvidia-container-toolkit.list

$sudo apt-get update

$sudo apt-get install -y nvidia-container-toolkit

# Register the NVIDIA runtime with Docker (writes /etc/docker/daemon.json);
# this step was missing and `--gpus all` can fail without it.
$sudo nvidia-ctk runtime configure --runtime=docker

# Restart Docker so it picks up the NVIDIA runtime configuration.
$sudo systemctl restart docker


#== working directory setup ===
# mkdir -p: create the directories only if missing, so these steps are
# safe to re-run (plain mkdir errors out on an existing directory).
$mkdir -p work_prj
$cd work_prj
$mkdir -p ollama-data webuidata

#=== docker startup ollama ====
# Run Ollama with GPU access, auto-restart, and its model store
# persisted in ./ollama-data on the host.
# NOTE: `docker run -v` requires an ABSOLUTE host path for bind mounts —
# a relative "./ollama-data" is rejected as an invalid volume name,
# so use "$(pwd)" (run from inside work_prj).
$sudo docker run -d --name ollama \
  --gpus all \
  --restart always \
  -p 11434:11434 \
  -v "$(pwd)/ollama-data":/root/.ollama \
  -e NVIDIA_VISIBLE_DEVICES=all \
  -e NVIDIA_DRIVER_CAPABILITIES=compute,utility \
  ollama/ollama

#=== docker startup webui for llm ===
# Inside a container, "localhost" is the container itself — NOT the host
# where Ollama's port 11434 is published. Map the host gateway to
# host.docker.internal and point the WebUI at it.
# Also: OLLAMA_BASE_URL is the current variable name (OLLAMA_API_BASE_URL
# is deprecated), and the bind-mount path must be absolute (see above).
$sudo docker run -d \
  --name open-webui \
  --restart always \
  -p 3100:8080 \
  --add-host=host.docker.internal:host-gateway \
  -v "$(pwd)/webuidata":/app/backend/data \
  -e OLLAMA_BASE_URL=http://host.docker.internal:11434 \
  ghcr.io/open-webui/open-webui:main




















No comments: