Base image: nvidia/cuda:11.8.0-devel-ubuntu22.04
CUDA: 11.8
Python: 3.11
torch: 2.1.2

Build command:

sudo docker build -t vllm:v1 .
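
The VLLM_VERSION declared in the Dockerfile is a build argument, so a different vLLM release can be selected at build time without editing the file. A minimal sketch, assuming the chosen version has a matching +cu118 wheel on the vLLM GitHub releases page:

sudo docker build -t vllm:v1 --build-arg VLLM_VERSION=0.4.0.post1 .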

Dockerfile contents:

FROM nvidia/cuda:11.8.0-devel-ubuntu22.04

# Define the VLLM_VERSION build argument and set its default value
ARG VLLM_VERSION=0.4.0.post1

# Set image metadata
LABEL maintainer="None"
LABEL version=${VLLM_VERSION}
LABEL description="${VLLM_VERSION}+cu118"

# Add miniconda to PATH
ENV PATH="/root/miniconda3/bin:$PATH"
ARG PATH="/root/miniconda3/bin:$PATH"

# Switch the default shell to bash && point apt at the Tsinghua mirror && install wget, unzip, vim, screen, curl
RUN rm /bin/sh && ln -s /bin/bash /bin/sh && \
    mv /etc/apt/sources.list /etc/apt/sources.list.bak && \
    echo "deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ jammy main restricted universe multiverse" > /etc/apt/sources.list && \
    echo "deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ jammy-updates main restricted universe multiverse" >> /etc/apt/sources.list && \
    echo "deb https://mirnors.tuna.tsinghua.edu.cn/ubuntu/ jammy-backports main restricted universe multiverse" >> /etc/apt/sources.list && \
    echo "deb https://mirrors.tuna.tsinghua.edu.cn/ubuntu/ jammy-security main restricted universe multiverse" >> /etc/apt/sources.list && \
    apt-get update && \
    apt-get install -y wget unzip vim screen curl

# Install miniconda
RUN mkdir -p ~/miniconda3 && \
    wget https://repo.anaconda.com/miniconda/Miniconda3-py311_24.1.2-0-Linux-x86_64.sh -O ~/miniconda3/miniconda.sh && \
    bash ~/miniconda3/miniconda.sh -b -u -p ~/miniconda3 && \
    rm -rf ~/miniconda3/miniconda.sh && \
    ~/miniconda3/bin/conda init bash && \
    conda --version && \
    python -V

# Set pip to the Tsinghua mirror and install vLLM, torch, xformers, fschat, and openai
ARG PYTHON_VERSION=311
RUN ~/miniconda3/bin/python -m pip config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple && \
    ~/miniconda3/bin/python -m pip install https://github.com/vllm-project/vllm/releases/download/v${VLLM_VERSION}/vllm-${VLLM_VERSION}+cu118-cp${PYTHON_VERSION}-cp${PYTHON_VERSION}-manylinux1_x86_64.whl && \
    ~/miniconda3/bin/python -m pip uninstall torch -y && \
    ~/miniconda3/bin/python -m pip install torch==2.1.2 --index-url https://download.pytorch.org/whl/cu118 && \
    # ~/miniconda3/bin/python -m pip uninstall cupy-cuda12x -y && \
    # ~/miniconda3/bin/python -m pip install cupy-cuda11x==12.1 && \
    ~/miniconda3/bin/python -m pip uninstall xformers -y && \
    ~/miniconda3/bin/python -m pip install xformers==v0.0.23.post1 --index-url https://download.pytorch.org/whl/cu118 && \
    ~/miniconda3/bin/python -m pip install "fschat[model_worker,webui]"==0.2.36 && \
    ~/miniconda3/bin/python -m pip install openai
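
# Note: the vLLM wheel above pins torch==2.1.2, which pip typically resolves to the
# default (CUDA 12.x) build from PyPI; torch and xformers are therefore reinstalled
# from the cu118 index so they match the CUDA 11.8 toolchain of the base image.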

# Purge the pip cache
RUN ~/miniconda3/bin/python -m pip cache purge
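
Once the image is built, it can be smoke-tested by launching vLLM's OpenAI-compatible API server in a container. A minimal sketch, assuming the NVIDIA Container Toolkit is installed on the host; the model path, mount point, and port are placeholders to adjust for your setup:

sudo docker run --gpus all -p 8000:8000 \
    -v /path/to/models:/models \
    vllm:v1 \
    python -m vllm.entrypoints.openai.api_server \
    --model /models/your-model --host 0.0.0.0 --port 8000

If the server starts correctly, a request to http://localhost:8000/v1/models should list the loaded model.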