From d0d5bb6a3f29ee9a8003704ea6429898c380fdbf Mon Sep 17 00:00:00 2001
From: GuangJie1
Date: Mon, 28 Oct 2024 14:21:17 +0800
Subject: [PATCH] ChatQna: support aarch64

---
 chatqna-ui/meta.yml                           |  3 +-
 chatqna/meta.yml                              |  3 +-
 .../0.3.4-redis5.1.1/24.03-lts/Dockerfile     | 60 ++++++++++++-----
 dataprep-redis-langchain/meta.yml             |  3 +-
 .../0.3.3/24.03-lts/Dockerfile                | 25 ++++++--
 embedding-tei-langchain/meta.yml              |  3 +-
 .../0.3.3-npu/24.03-lts/Dockerfile            | 64 +++++++++++++++++++
 llm-vllm-langchain/meta.yml                   |  4 +-
 reranking-tei/1.0/24.03-lts/Dockerfile        | 15 ++++-
 reranking-tei/meta.yml                        |  3 +-
 .../0.3.3-redis5.1.1/24.03-lts/Dockerfile     | 15 ++++-
 retriever-redis-langchain/meta.yml            |  3 +-
 .../1.5/24.03-lts/Dockerfile                  | 54 +++++-----------
 text-embeddings-inference-cpu/meta.yml        |  3 +-
 .../22.03-lts/Dockerfile                      | 11 ++++
 vllm/meta.yml                                 |  4 +-
 16 files changed, 196 insertions(+), 77 deletions(-)
 create mode 100644 llm-vllm-langchain/0.3.3-npu/24.03-lts/Dockerfile
 create mode 100644 vllm/0.6.3-cann8.0.rc3.alpha002-910b-py3.9/22.03-lts/Dockerfile

diff --git a/chatqna-ui/meta.yml b/chatqna-ui/meta.yml
index e3709ef9..05addfa6 100644
--- a/chatqna-ui/meta.yml
+++ b/chatqna-ui/meta.yml
@@ -1,3 +1,2 @@
 1.0-oe2403lts:
-  path: chatqna-ui/1.0/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: chatqna-ui/1.0/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/chatqna/meta.yml b/chatqna/meta.yml
index 33875eab..9b63f5d4 100644
--- a/chatqna/meta.yml
+++ b/chatqna/meta.yml
@@ -1,3 +1,2 @@
 1.0-oe2403lts:
-  path: chatqna/1.0/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: chatqna/1.0/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/dataprep-redis-langchain/0.3.4-redis5.1.1/24.03-lts/Dockerfile b/dataprep-redis-langchain/0.3.4-redis5.1.1/24.03-lts/Dockerfile
index 0bbebe0f..d7e447e9 100644
--- a/dataprep-redis-langchain/0.3.4-redis5.1.1/24.03-lts/Dockerfile
+++ b/dataprep-redis-langchain/0.3.4-redis5.1.1/24.03-lts/Dockerfile
@@ -18,22 +18,52 @@ RUN yum update -y && \
     python-pip python \
     tesseract \
     tesseract-langpack-eng \
-    git
-
+    git \
+    java-1.8.0-openjdk-devel \
+    gtk3-devel \
+    glib2-devel \
+    glibmm24-devel \
+    libtool \
+    cmake gcc-c++ \
+    python3-devel \
+    python3-pip \
+    gperf \
+    boost-devel \
+    gstreamer1-devel \
+    gstreamer1-plugins-base-devel \
+    libpng-devel \
+    libjpeg-devel \
+    libtiff-devel \
+    libxml2-devel \
+    libxslt-devel \
+    cups-devel \
+    libSM-devel \
+    libICE-devel \
+    nss-devel nspr-devel \
+    libXt-devel \
+    bison \
+    flex \
+    diffutils \
+    patch \
+    unzip \
+    ant \
+    junit
+
+RUN cd /tmp && git clone --depth 1 https://github.com/trimesh/vhacdx && cd vhacdx && \
+    CFLAGS="-I /tmp/vhacdx" pip install . && \
+    rm -rf /tmp/vhacdx
+
+RUN python -m pip install scikit-build-core numpy && \
+    cd /tmp && git clone --depth 1 https://github.com/elalish/manifold.git && \
+    cd manifold && git clone --depth 1 --branch v2021.10.0 https://github.com/oneapi-src/oneTBB.git && \
+    python -m pip install --upgrade pip setuptools wheel build nanobind ninja && \
+    python -m build --no-isolation --sdist --wheel && \
+    python -m pip install dist/manifold3d-*.whl
 
-RUN mkdir -p /tmp/LibreOffice/LibreOffice && \
-    wget -O /tmp/LibreOffice/LibreOffice.tar.gz https://mirrors.cloud.tencent.com/libreoffice/libreoffice/stable/24.8.2/rpm/x86_64/LibreOffice_24.8.2_Linux_x86-64_rpm.tar.gz && \
-    tar -zxvf /tmp/LibreOffice/LibreOffice.tar.gz -C /tmp/LibreOffice/LibreOffice/ --strip-components 1 && \
-    yum -y install /tmp/LibreOffice/LibreOffice/RPMS/*.rpm && \
-    mkdir -p /tmp/LibreOffice/langpack_zh-CN && \
-    wget -O /tmp/LibreOffice/langpack_zh-CN.tar.gz https://mirrors.cloud.tencent.com/libreoffice/libreoffice/stable/24.8.2/rpm/x86_64/LibreOffice_24.8.2_Linux_x86-64_rpm_langpack_zh-CN.tar.gz && \
-    tar -zxvf /tmp/LibreOffice/langpack_zh-CN.tar.gz -C /tmp/LibreOffice/langpack_zh-CN/ --strip-components 1 && \
-    yum -y install /tmp/LibreOffice/langpack_zh-CN/RPMS/*.rpm && \
-    mkdir -p /tmp/LibreOffice/helppack_zh-CN && \
-    wget -O /tmp/LibreOffice/helppack_zh-CN.tar.gz https://mirrors.cloud.tencent.com/libreoffice/libreoffice/stable/24.8.2/rpm/x86_64/LibreOffice_24.8.2_Linux_x86-64_rpm_helppack_zh-CN.tar.gz && \
-    tar -zxvf /tmp/LibreOffice/helppack_zh-CN.tar.gz -C /tmp/LibreOffice/helppack_zh-CN/ --strip-components 1 && \
-    yum -y install /tmp/LibreOffice/helppack_zh-CN/RPMS/*.rpm && \
-    rm -rf /tmp/LibreOffice
+RUN git clone --depth 1 https://git.libreoffice.org/core /tmp/libreoffice && \
+    cd /tmp/libreoffice && chmod +x ./autogen.sh && bash -x ./autogen.sh && \
+    make -j "$(nproc)" && make install && \
+    rm -rf /tmp/libreoffice
 
 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
diff --git a/dataprep-redis-langchain/meta.yml b/dataprep-redis-langchain/meta.yml
index 079bce74..1d60ced3 100644
--- a/dataprep-redis-langchain/meta.yml
+++ b/dataprep-redis-langchain/meta.yml
@@ -1,3 +1,2 @@
 0.3.4-redis5.1.1-oe2403lts:
-  path: dataprep-redis-langchain/0.3.4-redis5.1.1/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: dataprep-redis-langchain/0.3.4-redis5.1.1/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/embedding-tei-langchain/0.3.3/24.03-lts/Dockerfile b/embedding-tei-langchain/0.3.3/24.03-lts/Dockerfile
index 3935c5b2..e6200f0d 100644
--- a/embedding-tei-langchain/0.3.3/24.03-lts/Dockerfile
+++ b/embedding-tei-langchain/0.3.3/24.03-lts/Dockerfile
@@ -8,17 +8,23 @@ ARG ARCH="cpu"
 RUN yum update -y && \
     yum install -y \
     --setopt=install_weak_deps=False \
-    python-pip python \
+    python-pip python python3-devel \
+    cmake gcc-c++ git \
     shadow-utils \
     mesa-libGL \
     jemalloc-devel \
     git
 
-RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+RUN cd /tmp && git clone --depth 1 https://github.com/trimesh/vhacdx && cd vhacdx && \
+    CFLAGS="-I /tmp/vhacdx" pip install . && \
+    rm -rf /tmp/vhacdx
 
-USER user
+RUN python -m pip install scikit-build-core numpy && \
+    cd /tmp && git clone --depth 1 https://github.com/elalish/manifold.git && \
+    cd manifold && git clone --depth 1 --branch v2021.10.0 https://github.com/oneapi-src/oneTBB.git && \
+    python -m pip install --upgrade pip setuptools wheel build nanobind ninja && \
+    python -m build --no-isolation --sdist --wheel && \
+    python -m pip install dist/manifold3d-*.whl
 
 WORKDIR /home/user/
 
@@ -26,8 +32,13 @@ RUN git clone https://gitee.com/zhihang161013/GenAIComps.git && \
     cp -r GenAIComps/comps . && \
     rm -rf GenAIComps
 
-RUN pip install --no-cache-dir --upgrade pip && \
-    if [ ${ARCH} = "cpu" ]; then pip install --no-cache-dir torch --index-url https://download.pytorch.org/whl/cpu; fi && \
+RUN useradd -m -s /bin/bash user && \
+    mkdir -p /home/user && \
+    chown -R user /home/user/
+
+USER user
+
+RUN if [ ${ARCH} = "cpu" ]; then pip install --no-cache-dir torch --index-url https://download.pytorch.org/whl/cpu; fi && \
     pip install --no-cache-dir -r /home/user/comps/embeddings/tei/langchain/requirements.txt
 
 ENV PYTHONPATH=/usr/bin/python:/home/user
diff --git a/embedding-tei-langchain/meta.yml b/embedding-tei-langchain/meta.yml
index 945e7c91..1192bf09 100644
--- a/embedding-tei-langchain/meta.yml
+++ b/embedding-tei-langchain/meta.yml
@@ -1,3 +1,2 @@
 0.3.3-oe2403lts:
-  path: embedding-tei-langchain/0.3.3/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: embedding-tei-langchain/0.3.3/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/llm-vllm-langchain/0.3.3-npu/24.03-lts/Dockerfile b/llm-vllm-langchain/0.3.3-npu/24.03-lts/Dockerfile
new file mode 100644
index 00000000..093482a9
--- /dev/null
+++ b/llm-vllm-langchain/0.3.3-npu/24.03-lts/Dockerfile
@@ -0,0 +1,64 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+FROM openeuler/openeuler:22.03-lts
+
+RUN yum update -y && \
+    yum install -y --setopt=install_weak_deps=False \
+    python-pip python python3-devel \
+    cmake gcc-c++ \
+    shadow-utils \
+    mesa-libGL \
+    jemalloc-devel \
+    git
+
+RUN useradd -m -s /bin/bash user && \
+    mkdir -p /home/user && \
+    chown -R user /home/user/
+
+USER user
+
+WORKDIR /home/user/
+
+RUN git clone https://gitee.com/zhihang161013/GenAIComps.git && \
+    cp -r GenAIComps/comps . && \
+    rm -rf GenAIComps
+
+RUN cd /tmp && git clone --depth 1 https://github.com/trimesh/vhacdx && cd vhacdx && \
+    CFLAGS="-I /tmp/vhacdx" pip install . && \
+    rm -rf /tmp/vhacdx
+
+RUN python -m pip install scikit-build-core numpy && \
+    cd /tmp && git clone --depth 1 https://github.com/elalish/manifold.git && \
+    cd manifold && git clone --depth 1 --branch v2021.10.0 https://github.com/oneapi-src/oneTBB.git && \
+    python -m pip install --upgrade pip setuptools wheel build nanobind ninja && \
+    python -m build --no-isolation --sdist --wheel && \
+    python -m pip install dist/manifold3d-*.whl && \
+    rm -rf /tmp/manifold
+
+RUN python3 -m pip install --no-cache-dir --upgrade pip && \
+    python3 -m pip install --no-cache-dir \
+    fastapi \
+    huggingface_hub \
+    langchain \
+    langchain-huggingface \
+    langchain-openai \
+    langchain_community \
+    langchainhub \
+    opentelemetry-api \
+    opentelemetry-exporter-otlp \
+    opentelemetry-sdk \
+    prometheus-fastapi-instrumentator \
+    shortuuid \
+    transformers \
+    uvicorn
+
+RUN pip3 install docarray[full]
+RUN git clone -b npu_support --depth 1 https://github.com/wangshuai09/vllm.git
+RUN PIP_EXTRA_INDEX_URL="https://download.pytorch.org/whl/cpu" python3 -m pip install -r ./vllm/requirements-build.txt
+RUN PIP_EXTRA_INDEX_URL="https://download.pytorch.org/whl/cpu" VLLM_TARGET_DEVICE="npu" python3 -m pip install ./vllm/ && \
+    rm -rf ./vllm
+
+ENV PYTHONPATH=/usr/bin/python:/home/user
+WORKDIR /home/user/comps/llms/text-generation/vllm/langchain
+
+ENTRYPOINT ["bash", "entrypoint.sh"]
diff --git a/llm-vllm-langchain/meta.yml b/llm-vllm-langchain/meta.yml
index 03e45bd4..003354fc 100644
--- a/llm-vllm-langchain/meta.yml
+++ b/llm-vllm-langchain/meta.yml
@@ -1,3 +1,5 @@
 0.3.3-oe2403lts:
   path: llm-vllm-langchain/0.3.3/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  arch: x86_64
+0.3.3-npu-oe2203lts:
+  path: llm-vllm-langchain/0.3.3-npu/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/reranking-tei/1.0/24.03-lts/Dockerfile b/reranking-tei/1.0/24.03-lts/Dockerfile
index dcc63314..d9998627 100644
--- a/reranking-tei/1.0/24.03-lts/Dockerfile
+++ b/reranking-tei/1.0/24.03-lts/Dockerfile
@@ -7,12 +7,25 @@ ARG ARCH="cpu"
 RUN yum update -y && \
     yum install -y --setopt=install_weak_deps=False \
-    python-pip python \
+    python-pip python python3-devel \
+    cmake gcc-c++ git \
     shadow-utils \
     mesa-libGL \
     jemalloc-devel \
     git
 
+RUN cd /tmp && git clone --depth 1 https://github.com/trimesh/vhacdx && cd vhacdx && \
+    CFLAGS="-I /tmp/vhacdx" pip install . && \
+    rm -rf /tmp/vhacdx
+
+RUN python -m pip install scikit-build-core numpy && \
+    cd /tmp && git clone --depth 1 https://github.com/elalish/manifold.git && \
+    cd manifold && git clone --depth 1 --branch v2021.10.0 https://github.com/oneapi-src/oneTBB.git && \
+    python -m pip install --upgrade pip setuptools wheel build nanobind ninja && \
+    python -m build --no-isolation --sdist --wheel && \
+    python -m pip install dist/manifold3d-*.whl && \
+    rm -rf /tmp/manifold
+
 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
     chown -R user /home/user/
diff --git a/reranking-tei/meta.yml b/reranking-tei/meta.yml
index 39a54523..3423455e 100644
--- a/reranking-tei/meta.yml
+++ b/reranking-tei/meta.yml
@@ -1,3 +1,2 @@
 1.0-oe2403lts:
-  path: reranking-tei/1.0/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: reranking-tei/1.0/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/retriever-redis-langchain/0.3.3-redis5.1.1/24.03-lts/Dockerfile b/retriever-redis-langchain/0.3.3-redis5.1.1/24.03-lts/Dockerfile
index bb309c9c..72fc226d 100644
--- a/retriever-redis-langchain/0.3.3-redis5.1.1/24.03-lts/Dockerfile
+++ b/retriever-redis-langchain/0.3.3-redis5.1.1/24.03-lts/Dockerfile
@@ -7,12 +7,25 @@ ARG ARCH="cpu"
 RUN yum update -y && \
     yum install -y --setopt=install_weak_deps=False \
-    python-pip python \
+    python-pip python python3-devel \
+    cmake gcc-c++ git \
     shadow-utils \
     mesa-libGL \
     jemalloc-devel \
     git
 
+RUN cd /tmp && git clone --depth 1 https://github.com/trimesh/vhacdx && cd vhacdx && \
+    CFLAGS="-I /tmp/vhacdx" pip install . && \
+    rm -rf /tmp/vhacdx
+
+RUN python -m pip install scikit-build-core numpy && \
+    cd /tmp && git clone --depth 1 https://github.com/elalish/manifold.git && \
+    cd manifold && git clone --depth 1 --branch v2021.10.0 https://github.com/oneapi-src/oneTBB.git && \
+    python -m pip install --upgrade pip setuptools wheel build nanobind ninja && \
+    python -m build --no-isolation --sdist --wheel && \
+    python -m pip install dist/manifold3d-*.whl && \
+    rm -rf /tmp/manifold
+
 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
     chown -R user /home/user/
diff --git a/retriever-redis-langchain/meta.yml b/retriever-redis-langchain/meta.yml
index f07b1cb6..b491223f 100644
--- a/retriever-redis-langchain/meta.yml
+++ b/retriever-redis-langchain/meta.yml
@@ -1,3 +1,2 @@
 0.3.3-redis5.1.1-oe2403lts:
-  path: retriever-redis-langchain/0.3.3-redis5.1.1/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: retriever-redis-langchain/0.3.3-redis5.1.1/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/text-embeddings-inference-cpu/1.5/24.03-lts/Dockerfile b/text-embeddings-inference-cpu/1.5/24.03-lts/Dockerfile
index 7fb6efd4..79c5e9f5 100644
--- a/text-embeddings-inference-cpu/1.5/24.03-lts/Dockerfile
+++ b/text-embeddings-inference-cpu/1.5/24.03-lts/Dockerfile
@@ -1,13 +1,12 @@
 FROM openeuler/openeuler:24.03-lts AS chef
+ARG TARGETARCH
+ARG BUILDARCH
 WORKDIR /usr/src
-
-ENV SCCACHE=0.5.4
-ENV RUSTC_WRAPPER=/usr/local/bin/sccache
-
 RUN yum update -y && \
     yum install -y \
     --setopt=install_weak_deps=False \
     openssl-devel \
+    ca-certificates \
     gcc \
     g++ \
     git
@@ -16,62 +15,46 @@ RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs -o rustup-init && \
     chmod +x rustup-init && \
     ./rustup-init -y && \
     rm rustup-init
-
 ENV PATH="/root/.cargo/bin:$PATH"
-
-# Donwload, configure sccache
-RUN curl -fsSL https://github.com/mozilla/sccache/releases/download/v$SCCACHE/sccache-v$SCCACHE-x86_64-unknown-linux-musl.tar.gz | tar -xzv --strip-components=1 -C /usr/local/bin sccache-v$SCCACHE-x86_64-unknown-linux-musl/sccache && \
-    chmod +x /usr/local/bin/sccache
-
 RUN cargo install cargo-chef --locked
 
 FROM chef AS planner
-
 RUN git clone https://github.com/huggingface/text-embeddings-inference.git
-
 WORKDIR /usr/src/text-embeddings-inference
-
 RUN cargo chef prepare --recipe-path recipe.json
 
 FROM chef AS builder
-
+RUN yum update -y && \
+    yum install -y \
+    --setopt=install_weak_deps=False \
+    ca-certificates \
+    openssl-devel
 ARG GIT_SHA
 ARG DOCKER_LABEL
-
-# sccache specific variables
 ARG ACTIONS_CACHE_URL
 ARG ACTIONS_RUNTIME_TOKEN
-ARG SCCACHE_GHA_ENABLED
-
 COPY --from=planner /usr/src/text-embeddings-inference/recipe.json recipe.json
-
-RUN cargo chef cook --release --features ort --no-default-features --recipe-path recipe.json && sccache -s
-
+RUN cargo chef cook --release --features ort --no-default-features --recipe-path recipe.json
 RUN git clone https://github.com/huggingface/text-embeddings-inference.git
 
 FROM builder AS http-builder
 WORKDIR /usr/src/text-embeddings-inference
-
-RUN cargo build --release --bin text-embeddings-router -F ort -F http --no-default-features && sccache -s
+RUN cargo build --release --bin text-embeddings-router -F ort -F http --no-default-features
 
 FROM builder AS grpc-builder
 WORKDIR /usr/src/text-embeddings-inference
-
-RUN PROTOC_ZIP=protoc-21.12-linux-x86_64.zip && \
+RUN PROTOC_ARCH=$(if [ "$TARGETARCH" = "amd64" ]; then echo "x86_64"; elif [ "$TARGETARCH" = "arm64" ]; then echo "aarch64"; fi) && \
+    PROTOC_ZIP=protoc-21.12-linux-$PROTOC_ARCH.zip && \
     curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v21.12/$PROTOC_ZIP && \
     unzip -o $PROTOC_ZIP -d /usr/local bin/protoc && \
     unzip -o $PROTOC_ZIP -d /usr/local 'include/*' && \
     rm -f $PROTOC_ZIP
-
 COPY proto proto
-
-RUN cargo build --release --bin text-embeddings-router -F grpc -F ort --no-default-features && sccache -s
+RUN cargo build --release --bin text-embeddings-router -F grpc -F ort --no-default-features
 
 FROM openeuler/openeuler:24.03-lts AS base
-
 ENV HUGGINGFACE_HUB_CACHE=/data \
     PORT=80
-
 RUN yum update -y && \
     yum install -y \
     --setopt=install_weak_deps=False \
@@ -79,24 +62,21 @@ RUN yum update -y && \
     openssl-devel
 
 FROM base AS grpc
-
-COPY --from=grpc-builder /usr/src/text-embeddings-inference/target/release/text-embeddings-router /usr/local/bin/text-embeddings-router
-
+COPY --from=grpc-builder /usr/src/text-embeddings-inference/target/release/text-embeddings-router \
+/usr/local/bin/text-embeddings-router
 ENTRYPOINT ["text-embeddings-router"]
 CMD ["--json-output"]
 
 FROM base AS http
-
-COPY --from=http-builder /usr/src/text-embeddings-inference/target/release/text-embeddings-router /usr/local/bin/text-embeddings-router
+COPY --from=http-builder /usr/src/text-embeddings-inference/target/release/text-embeddings-router \
+/usr/local/bin/text-embeddings-router
 
 # Amazon SageMaker compatible image
 FROM http AS sagemaker
 
 COPY --chmod=775 sagemaker-entrypoint.sh entrypoint.sh
-
 ENTRYPOINT ["./entrypoint.sh"]
 
 # Default image
 FROM http
-
 ENTRYPOINT ["text-embeddings-router"]
 CMD ["--json-output"]
diff --git a/text-embeddings-inference-cpu/meta.yml b/text-embeddings-inference-cpu/meta.yml
index fa1a239e..0677c909 100644
--- a/text-embeddings-inference-cpu/meta.yml
+++ b/text-embeddings-inference-cpu/meta.yml
@@ -1,3 +1,2 @@
 1.5-oe2403lts:
-  path: text-embeddings-inference-cpu/1.5/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: text-embeddings-inference-cpu/1.5/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/vllm/0.6.3-cann8.0.rc3.alpha002-910b-py3.9/22.03-lts/Dockerfile b/vllm/0.6.3-cann8.0.rc3.alpha002-910b-py3.9/22.03-lts/Dockerfile
new file mode 100644
index 00000000..8b15ddf0
--- /dev/null
+++ b/vllm/0.6.3-cann8.0.rc3.alpha002-910b-py3.9/22.03-lts/Dockerfile
@@ -0,0 +1,11 @@
+FROM ascendai/cann:8.0.rc3.alpha002-910b-openeuler22.03-py3.9
+RUN dnf update -y && \
+    dnf install -y python3-pip git
+WORKDIR /workspace
+RUN git clone -b npu_support --depth 1 https://github.com/GuangJie1/vllm.git
+# install build requirements
+RUN PIP_EXTRA_INDEX_URL="https://download.pytorch.org/whl/cpu" python3 -m pip install -r /workspace/vllm/requirements-build.txt
+# build vLLM with NPU backend
+RUN PIP_EXTRA_INDEX_URL="https://download.pytorch.org/whl/cpu" VLLM_TARGET_DEVICE="npu" python3 -m pip install /workspace/vllm/
+ENV HF_ENDPOINT=https://hf-mirror.com
+ENTRYPOINT ["bash", "-c", "source /root/.bashrc && exec python3 -m vllm.entrypoints.openai.api_server \"$@\"", "--"]
\ No newline at end of file
diff --git a/vllm/meta.yml b/vllm/meta.yml
index f7f81620..af53a6a7 100644
--- a/vllm/meta.yml
+++ b/vllm/meta.yml
@@ -1,3 +1,5 @@
 0.6.3-oe2403lts:
   path: vllm/0.6.3/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  arch: x86_64
+0.6.3-cann8.0.rc3.alpha002-910b-py3.9-oe2203lts:
+  path: vllm/0.6.3-cann8.0.rc3.alpha002-910b-py3.9/22.03-lts/Dockerfile
\ No newline at end of file
--
Gitee
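
Usage sketch (not part of the patch): assuming the patch is applied as-is and the host has the Ascend 910B driver installed, the new CANN-based vLLM image could be built and started roughly as below. The device nodes, driver mount point, image tag, and model name are illustrative assumptions, not taken from this patch; adjust them to the actual host.

    # Build the OpenAI-compatible vLLM server image for Ascend NPU (CANN 8.0.RC3)
    docker build -t vllm-npu:0.6.3 \
        -f vllm/0.6.3-cann8.0.rc3.alpha002-910b-py3.9/22.03-lts/Dockerfile .

    # Run with one NPU passed through; these device paths are the usual Ascend defaults
    docker run --rm -p 8000:8000 \
        --device /dev/davinci0 \
        --device /dev/davinci_manager \
        --device /dev/devmm_svm \
        --device /dev/hisi_hdc \
        -v /usr/local/Ascend/driver:/usr/local/Ascend/driver:ro \
        vllm-npu:0.6.3 \
        --model Qwen/Qwen2-7B-Instruct --host 0.0.0.0 --port 8000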