diff --git a/AI/vllm-ascend/0.9.0rc2-torch_npu2.5.1-cann8.1.rc1-python3.10/22.03-lts/Dockerfile b/AI/vllm-ascend/0.9.0rc2-torch_npu2.5.1-cann8.1.rc1-python3.10/22.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..0e69bb2132995fc82aded8a9f6426d16fa6d9f88
--- /dev/null
+++ b/AI/vllm-ascend/0.9.0rc2-torch_npu2.5.1-cann8.1.rc1-python3.10/22.03-lts/Dockerfile
@@ -0,0 +1,41 @@
+FROM quay.io/ascend/cann:8.1.rc1-910b-openeuler22.03-py3.10
+
+ARG PIP_INDEX_URL="https://mirrors.tuna.tsinghua.edu.cn/pypi/web/simple"
+ARG COMPILE_CUSTOM_KERNELS=1
+
+ENV COMPILE_CUSTOM_KERNELS=${COMPILE_CUSTOM_KERNELS}
+
+RUN yum update -y && \
+    yum install -y python3-pip git vim wget net-tools make gcc gcc-c++ && \
+    rm -rf /var/cache/yum && \
+    rm -rf /tmp/*
+
+RUN pip config set global.index-url ${PIP_INDEX_URL}
+
+WORKDIR /vllm-workspace
+
+# Install vLLM
+ARG VLLM_REPO=https://github.com/vllm-project/vllm.git
+ARG VLLM_ASCEND_REPO=https://github.com/vllm-project/vllm-ascend.git
+ARG VLLM_TAG=v0.9.0
+ARG VLLM_ASCEND_TAG=v0.9.0rc2
+
+RUN git clone $VLLM_REPO --branch $VLLM_TAG /vllm-workspace/vllm
+# On x86, triton is installed by vLLM, but it does not work correctly on Ascend, so we uninstall it.
+RUN VLLM_TARGET_DEVICE="empty" python3 -m pip install -e /vllm-workspace/vllm/ --extra-index https://download.pytorch.org/whl/cpu/ && \
+    python3 -m pip uninstall -y triton && \
+    python3 -m pip cache purge
+
+
+RUN git clone $VLLM_ASCEND_REPO --branch $VLLM_ASCEND_TAG /vllm-workspace/vllm-ascend
+# Install vllm-ascend
+RUN source /usr/local/Ascend/ascend-toolkit/set_env.sh && \
+    source /usr/local/Ascend/nnal/atb/set_env.sh && \
+    export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/Ascend/ascend-toolkit/latest/`uname -i`-linux/devlib && \
+    python3 -m pip install -e /vllm-workspace/vllm-ascend/ --extra-index https://download.pytorch.org/whl/cpu/ && \
+    python3 -m pip cache purge
+
+# Install modelscope (for fast download) and ray (for multinode)
+RUN python3 -m pip install modelscope ray
+
+CMD ["/bin/bash"]
\ No newline at end of file
diff --git a/AI/vllm-ascend/README.md b/AI/vllm-ascend/README.md
index 8a0db907e5ecb037500070003c63b484c6658d75..afa2bfb01883314b865e8ad04b6d00ceb8baca3d 100644
--- a/AI/vllm-ascend/README.md
+++ b/AI/vllm-ascend/README.md
@@ -29,6 +29,7 @@ The tag of each vLLM Ascend docker image is consist of the version of vLLM Ascen
 |[0.8.4rc1-torch_npu2.5.1-cann8.0.0-python3.10-oe2203lts](https://gitee.com/openeuler/openeuler-docker-images/blob/master/AI/vllm-ascend/0.8.4rc1-torch_npu2.5.1-cann8.0.0-python3.10/22.03-lts/Dockerfile)| vLLM Ascend 0.8.4rc1 on openEuler 22.03-LTS | amd64, arm64 |
 |[0.8.5rc1-torch_npu2.5.1-cann8.1.rc1-python3.10-oe2203lts](https://gitee.com/openeuler/openeuler-docker-images/blob/master/AI/vllm-ascend/0.8.5rc1-torch_npu2.5.1-cann8.1.rc1-python3.10/22.03-lts/Dockerfile)| vLLM Ascend 0.8.5rc1 on openEuler 22.03-LTS | amd64, arm64 |
 |[0.9.0rc1-torch_npu2.5.1-cann8.1.rc1-python3.10-oe2203lts](https://gitee.com/openeuler/openeuler-docker-images/blob/master/AI/vllm-ascend/0.9.0rc1-torch_npu2.5.1-cann8.1.rc1-python3.10/22.03-lts/Dockerfile)| vLLM Ascend 0.9.0rc1 on openEuler 22.03-LTS | amd64, arm64 |
+|[0.9.0rc2-torch_npu2.5.1-cann8.1.rc1-python3.10-oe2203lts](https://gitee.com/openeuler/openeuler-docker-images/blob/master/AI/vllm-ascend/0.9.0rc2-torch_npu2.5.1-cann8.1.rc1-python3.10/22.03-lts/Dockerfile)| vLLM Ascend 0.9.0rc2 on openEuler 22.03-LTS | amd64, arm64 |
 
 # Usage
 
diff --git a/AI/vllm-ascend/meta.yml b/AI/vllm-ascend/meta.yml
index 794185c204c00327152375593e2e7664548eb32d..faf73108fbbb6ef8c431246c604a41988b95b9e6 100644
--- a/AI/vllm-ascend/meta.yml
+++ b/AI/vllm-ascend/meta.yml
@@ -12,3 +12,6 @@
 
 0.9.0rc1-torch_npu2.5.1-cann8.1.rc1-python3.10-oe2203lts:
   path: 0.9.0rc1-torch_npu2.5.1-cann8.1.rc1-python3.10/22.03-lts/Dockerfile
+
+0.9.0rc2-torch_npu2.5.1-cann8.1.rc1-python3.10-oe2203lts:
+  path: 0.9.0rc2-torch_npu2.5.1-cann8.1.rc1-python3.10/22.03-lts/Dockerfile
\ No newline at end of file
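Not part of the diff: a minimal sketch of how the new image could be built and smoke-tested locally. The image tag, build context, and device/driver mounts below are assumptions for illustration, not values taken from this PR; adjust them to your node's Ascend driver layout.

# Build the 0.9.0rc2 image from the new Dockerfile (tag name is illustrative).
docker build \
  -f AI/vllm-ascend/0.9.0rc2-torch_npu2.5.1-cann8.1.rc1-python3.10/22.03-lts/Dockerfile \
  -t openeuler/vllm-ascend:0.9.0rc2-oe2203lts \
  AI/vllm-ascend/0.9.0rc2-torch_npu2.5.1-cann8.1.rc1-python3.10/22.03-lts

# Start an interactive shell with one NPU and the host Ascend driver mounted
# (paths assume a standard Ascend driver installation on the host).
docker run --rm -it \
  --device /dev/davinci0 \
  --device /dev/davinci_manager \
  --device /dev/devmm_svm \
  --device /dev/hisi_hdc \
  -v /usr/local/dcmi:/usr/local/dcmi \
  -v /usr/local/bin/npu-smi:/usr/local/bin/npu-smi \
  -v /usr/local/Ascend/driver:/usr/local/Ascend/driver:ro \
  openeuler/vllm-ascend:0.9.0rc2-oe2203lts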