diff --git a/chatqna-ui/meta.yml b/chatqna-ui/meta.yml
index e3709ef9c48b8c04aba19f212bdc14fd2c1848ce..05addfa68f6a22dbf2c2a71b3f2adbe6de51e695 100644
--- a/chatqna-ui/meta.yml
+++ b/chatqna-ui/meta.yml
@@ -1,3 +1,2 @@
 1.0-oe2403lts:
-  path: chatqna-ui/1.0/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: chatqna-ui/1.0/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/chatqna/meta.yml b/chatqna/meta.yml
index 33875eab684da85b85a5039c6eef7f8d48ae52cb..9b63f5d406a2c97436684fae03b0ea52a63c640f 100644
--- a/chatqna/meta.yml
+++ b/chatqna/meta.yml
@@ -1,3 +1,2 @@
 1.0-oe2403lts:
-  path: chatqna/1.0/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: chatqna/1.0/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/dataprep-redis-langchain/0.3.4-redis5.1.1/24.03-lts/Dockerfile b/dataprep-redis-langchain/0.3.4-redis5.1.1/24.03-lts/Dockerfile
index 0bbebe0f20396eb2c953c299a12ff64a5ed880f8..d7e447e9365ae93d2595dc98ad30d60fe5dcd597 100644
--- a/dataprep-redis-langchain/0.3.4-redis5.1.1/24.03-lts/Dockerfile
+++ b/dataprep-redis-langchain/0.3.4-redis5.1.1/24.03-lts/Dockerfile
@@ -18,22 +18,52 @@ RUN yum update -y && \
     python-pip python \
     tesseract \
     tesseract-langpack-eng \
-    git
-
+    git \
+    java-1.8.0-openjdk-devel \
+    gtk3-devel \
+    glib2-devel \
+    glibmm24-devel \
+    libtool \
+    cmake gcc-c++ \
+    python3-devel \
+    python3-pip \
+    gperf \
+    boost-devel \
+    gstreamer1-devel \
+    gstreamer1-plugins-base-devel \
+    libpng-devel \
+    libjpeg-devel \
+    libtiff-devel \
+    libxml2-devel \
+    libxslt-devel \
+    cups-devel \
+    libSM-devel \
+    libICE-devel \
+    nss-devel nspr-devel \
+    libXt-devel \
+    bison \
+    flex \
+    diffutils \
+    patch \
+    unzip \
+    ant \
+    junit
+
+RUN cd /tmp && git clone --depth 1 https://github.com/trimesh/vhacdx && cd vhacdx && \
+    CFLAGS="-I /tmp/vhacdx" pip install . && \
+    rm -rf /tmp/vhacdx
+
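+# Build the manifold3d wheel from source: clone manifold, clone oneTBB v2021.10.0 into its tree, then build and install the wheel.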
+RUN python -m pip install scikit-build-core numpy && \
+    cd /tmp && git clone --depth 1 https://github.com/elalish/manifold.git && \
+    cd manifold && git clone --depth 1 --branch v2021.10.0 https://github.com/oneapi-src/oneTBB.git && \
+    python -m pip install --upgrade pip setuptools wheel build nanobind ninja && \
+    python -m build --no-isolation --sdist --wheel && \
+    python -m pip install dist/manifold3d-*.whl
 
-RUN mkdir -p /tmp/LibreOffice/LibreOffice && \
-    wget -O /tmp/LibreOffice/LibreOffice.tar.gz https://mirrors.cloud.tencent.com/libreoffice/libreoffice/stable/24.8.2/rpm/x86_64/LibreOffice_24.8.2_Linux_x86-64_rpm.tar.gz && \
-    tar -zxvf /tmp/LibreOffice/LibreOffice.tar.gz -C /tmp/LibreOffice/LibreOffice/ --strip-components 1 && \
-    yum -y install /tmp/LibreOffice/LibreOffice/RPMS/*.rpm && \
-    mkdir -p /tmp/LibreOffice/langpack_zh-CN && \
-    wget -O /tmp/LibreOffice/langpack_zh-CN.tar.gz https://mirrors.cloud.tencent.com/libreoffice/libreoffice/stable/24.8.2/rpm/x86_64/LibreOffice_24.8.2_Linux_x86-64_rpm_langpack_zh-CN.tar.gz && \
-    tar -zxvf /tmp/LibreOffice/langpack_zh-CN.tar.gz -C /tmp/LibreOffice/langpack_zh-CN/ --strip-components 1 && \
-    yum -y install /tmp/LibreOffice/langpack_zh-CN/RPMS/*.rpm && \
-    mkdir -p /tmp/LibreOffice/helppack_zh-CN && \
-    wget -O /tmp/LibreOffice/helppack_zh-CN.tar.gz https://mirrors.cloud.tencent.com/libreoffice/libreoffice/stable/24.8.2/rpm/x86_64/LibreOffice_24.8.2_Linux_x86-64_rpm_helppack_zh-CN.tar.gz && \
-    tar -zxvf /tmp/LibreOffice/helppack_zh-CN.tar.gz -C /tmp/LibreOffice/helppack_zh-CN/ --strip-components 1 && \
-    yum -y install /tmp/LibreOffice/helppack_zh-CN/RPMS/*.rpm && \
-    rm -rf /tmp/LibreOffice
+RUN git clone --depth 1 https://git.libreoffice.org/core /tmp/libreoffice && \
+    cd /tmp/libreoffice && chmod +x ./autogen.sh && bash -x ./autogen.sh && \
+    make -j "$(nproc)" && make install && \
+    rm -rf /tmp/libreoffice
 
 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
diff --git a/dataprep-redis-langchain/meta.yml b/dataprep-redis-langchain/meta.yml
index 079bce74ffbb5719b4156a40aee6ddce68072326..1d60ced31a9b377104b24da20c3cc7a65f5f341d 100644
--- a/dataprep-redis-langchain/meta.yml
+++ b/dataprep-redis-langchain/meta.yml
@@ -1,3 +1,2 @@
 0.3.4-redis5.1.1-oe2403lts:
-  path: dataprep-redis-langchain/0.3.4-redis5.1.1/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: dataprep-redis-langchain/0.3.4-redis5.1.1/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/embedding-tei-langchain/0.3.3/24.03-lts/Dockerfile b/embedding-tei-langchain/0.3.3/24.03-lts/Dockerfile
index 3935c5b2ba32efab2482be42ed0619e9fb005cf4..e6200f0d95711a2ba7de128fb5b0d394c5c0c5e6 100644
--- a/embedding-tei-langchain/0.3.3/24.03-lts/Dockerfile
+++ b/embedding-tei-langchain/0.3.3/24.03-lts/Dockerfile
@@ -8,17 +8,23 @@ ARG ARCH="cpu"
 
 RUN yum update -y && \
     yum install -y \
     --setopt=install_weak_deps=False \
-    python-pip python \
+    python-pip python python3-devel \
+    cmake gcc-c++ git \
     shadow-utils \
     mesa-libGL \
     jemalloc-devel \
     git
 
-RUN useradd -m -s /bin/bash user && \
-    mkdir -p /home/user && \
-    chown -R user /home/user/
+RUN cd /tmp && git clone --depth 1 https://github.com/trimesh/vhacdx && cd vhacdx && \
+    CFLAGS="-I /tmp/vhacdx" pip install . && \
+    rm -rf /tmp/vhacdx
 
-USER user
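+# Build and install the manifold3d wheel from a local source checkout (oneTBB is cloned into the manifold tree first).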
+RUN python -m pip install scikit-build-core numpy && \
+    cd /tmp && git clone --depth 1 https://github.com/elalish/manifold.git && \
+    cd manifold && git clone --depth 1 --branch v2021.10.0 https://github.com/oneapi-src/oneTBB.git && \
+    python -m pip install --upgrade pip setuptools wheel build nanobind ninja && \
+    python -m build --no-isolation --sdist --wheel && \
+    python -m pip install dist/manifold3d-*.whl
 
 WORKDIR /home/user/
@@ -26,8 +32,13 @@ RUN git clone https://gitee.com/zhihang161013/GenAIComps.git && \
     cp -r GenAIComps/comps . && \
     rm -rf GenAIComps
 
-RUN pip install --no-cache-dir --upgrade pip && \
-    if [ ${ARCH} = "cpu" ]; then pip install --no-cache-dir torch --index-url https://download.pytorch.org/whl/cpu; fi && \
+RUN useradd -m -s /bin/bash user && \
+    mkdir -p /home/user && \
+    chown -R user /home/user/
+
+USER user
+
+RUN if [ ${ARCH} = "cpu" ]; then pip install --no-cache-dir torch --index-url https://download.pytorch.org/whl/cpu; fi && \
     pip install --no-cache-dir -r /home/user/comps/embeddings/tei/langchain/requirements.txt
 
 ENV PYTHONPATH=/usr/bin/python:/home/user
diff --git a/embedding-tei-langchain/meta.yml b/embedding-tei-langchain/meta.yml
index 945e7c910bdc132ad17a1f2617d85b93cdcaf6f5..1192bf09cd84c37ebb648b33d258821771ccc8c4 100644
--- a/embedding-tei-langchain/meta.yml
+++ b/embedding-tei-langchain/meta.yml
@@ -1,3 +1,2 @@
 0.3.3-oe2403lts:
-  path: embedding-tei-langchain/0.3.3/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: embedding-tei-langchain/0.3.3/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/llm-vllm-langchain/0.3.3-npu/24.03-lts/Dockerfile b/llm-vllm-langchain/0.3.3-npu/24.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..093482a9a7e5022b2d6fb12eefc865593b4ef71e
--- /dev/null
+++ b/llm-vllm-langchain/0.3.3-npu/24.03-lts/Dockerfile
@@ -0,0 +1,64 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+FROM openeuler/openeuler:22.03-lts
+
+RUN yum update -y && \
+    yum install -y --setopt=install_weak_deps=False \
+    python-pip python python3-devel \
+    cmake gcc-c++ \
+    shadow-utils \
+    mesa-libGL \
+    jemalloc-devel \
+    git
+
+RUN useradd -m -s /bin/bash user && \
+    mkdir -p /home/user && \
+    chown -R user /home/user/
+
+USER user
+
+WORKDIR /home/user/
+
+RUN git clone https://gitee.com/zhihang161013/GenAIComps.git && \
+    cp -r GenAIComps/comps . && \
+    rm -rf GenAIComps
+
+RUN cd /tmp && git clone --depth 1 https://github.com/trimesh/vhacdx && cd vhacdx && \
+    CFLAGS="-I /tmp/vhacdx" pip install . && \
+    rm -rf /tmp/vhacdx
+
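+# Build the manifold3d wheel from source and drop the build tree once the wheel is installed.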
+RUN python -m pip install scikit-build-core numpy && \
+    cd /tmp && git clone --depth 1 https://github.com/elalish/manifold.git && \
+    cd manifold && git clone --depth 1 --branch v2021.10.0 https://github.com/oneapi-src/oneTBB.git && \
+    python -m pip install --upgrade pip setuptools wheel build nanobind ninja && \
+    python -m build --no-isolation --sdist --wheel && \
+    python -m pip install dist/manifold3d-*.whl && \
+    rm -rf /tmp/manifold
+
+RUN python3 -m pip install --no-cache-dir --upgrade pip && \
+    python3 -m pip install --no-cache-dir \
+    fastapi \
+    huggingface_hub \
+    langchain \
+    langchain-huggingface \
+    langchain-openai \
+    langchain_community \
+    langchainhub \
+    opentelemetry-api \
+    opentelemetry-exporter-otlp \
+    opentelemetry-sdk \
+    prometheus-fastapi-instrumentator \
+    shortuuid \
+    transformers \
+    uvicorn
+
+RUN pip3 install docarray[full]
+RUN git clone -b npu_support --depth 1 https://github.com/wangshuai09/vllm.git
+RUN PIP_EXTRA_INDEX_URL="https://download.pytorch.org/whl/cpu" python3 -m pip install -r ./vllm/requirements-build.txt
+RUN PIP_EXTRA_INDEX_URL="https://download.pytorch.org/whl/cpu" VLLM_TARGET_DEVICE="npu" python3 -m pip install ./vllm/ && \
+    rm -rf ./vllm
+
+ENV PYTHONPATH=/usr/bin/python:/home/user
+WORKDIR /home/user/comps/llms/text-generation/vllm/langchain
+
+ENTRYPOINT ["bash", "entrypoint.sh"]
diff --git a/llm-vllm-langchain/meta.yml b/llm-vllm-langchain/meta.yml
index 03e45bd4ca3c4b1dc88e68f2fb63588c64473ff8..003354fcc90d78325384af072d832a0fd2940038 100644
--- a/llm-vllm-langchain/meta.yml
+++ b/llm-vllm-langchain/meta.yml
@@ -1,3 +1,5 @@
 0.3.3-oe2403lts:
   path: llm-vllm-langchain/0.3.3/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  arch: x86_64
+0.3.3-npu-oe2203lts:
+  path: llm-vllm-langchain/0.3.3-npu/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/reranking-tei/1.0/24.03-lts/Dockerfile b/reranking-tei/1.0/24.03-lts/Dockerfile
index dcc6331472800fed9fe525d0ee860f765fe396d0..d99986276836664e388477561c29f65895cae244 100644
--- a/reranking-tei/1.0/24.03-lts/Dockerfile
+++ b/reranking-tei/1.0/24.03-lts/Dockerfile
@@ -7,12 +7,25 @@ ARG ARCH="cpu"
 
 RUN yum update -y && \
     yum install -y --setopt=install_weak_deps=False \
-    python-pip python \
+    python-pip python python3-devel \
+    cmake gcc-c++ git \
     shadow-utils \
     mesa-libGL \
     jemalloc-devel \
     git
 
+RUN cd /tmp && git clone --depth 1 https://github.com/trimesh/vhacdx && cd vhacdx && \
+    CFLAGS="-I /tmp/vhacdx" pip install . && \
+    rm -rf /tmp/vhacdx
+
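+# Compile the manifold3d wheel locally (with oneTBB cloned alongside) and clean up /tmp/manifold afterwards.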
+RUN python -m pip install scikit-build-core numpy && \
+    cd /tmp && git clone --depth 1 https://github.com/elalish/manifold.git && \
+    cd manifold && git clone --depth 1 --branch v2021.10.0 https://github.com/oneapi-src/oneTBB.git && \
+    python -m pip install --upgrade pip setuptools wheel build nanobind ninja && \
+    python -m build --no-isolation --sdist --wheel && \
+    python -m pip install dist/manifold3d-*.whl && \
+    rm -rf /tmp/manifold
+
 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
     chown -R user /home/user/
diff --git a/reranking-tei/meta.yml b/reranking-tei/meta.yml
index 39a54523c0426498bcd846e40094accab095e1e9..3423455ea6c4b53fcbd7cd3afbe15425e96cfc86 100644
--- a/reranking-tei/meta.yml
+++ b/reranking-tei/meta.yml
@@ -1,3 +1,2 @@
 1.0-oe2403lts:
-  path: reranking-tei/1.0/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: reranking-tei/1.0/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/retriever-redis-langchain/0.3.3-redis5.1.1/24.03-lts/Dockerfile b/retriever-redis-langchain/0.3.3-redis5.1.1/24.03-lts/Dockerfile
index bb309c9c32b1184a3aa448b71bdc685e40d74aed..72fc226d4f1319624c46e93c0630fe4c4a27c9b4 100644
--- a/retriever-redis-langchain/0.3.3-redis5.1.1/24.03-lts/Dockerfile
+++ b/retriever-redis-langchain/0.3.3-redis5.1.1/24.03-lts/Dockerfile
@@ -7,12 +7,25 @@ ARG ARCH="cpu"
 
 RUN yum update -y && \
     yum install -y --setopt=install_weak_deps=False \
-    python-pip python \
+    python-pip python python3-devel \
+    cmake gcc-c++ git \
     shadow-utils \
     mesa-libGL \
     jemalloc-devel \
     git
 
+RUN cd /tmp && git clone --depth 1 https://github.com/trimesh/vhacdx && cd vhacdx && \
+    CFLAGS="-I /tmp/vhacdx" pip install . && \
+    rm -rf /tmp/vhacdx
+
+RUN python -m pip install scikit-build-core numpy && \
+    cd /tmp && git clone --depth 1 https://github.com/elalish/manifold.git && \
+    cd manifold && git clone --depth 1 --branch v2021.10.0 https://github.com/oneapi-src/oneTBB.git && \
+    python -m pip install --upgrade pip setuptools wheel build nanobind ninja && \
+    python -m build --no-isolation --sdist --wheel && \
+    python -m pip install dist/manifold3d-*.whl && \
+    rm -rf /tmp/manifold
+
 RUN useradd -m -s /bin/bash user && \
     mkdir -p /home/user && \
     chown -R user /home/user/
diff --git a/retriever-redis-langchain/meta.yml b/retriever-redis-langchain/meta.yml
index f07b1cb62ef111d2185d8b1ad8f9edc02028dbfb..b491223f46cbbde3cba3a64f91747cecbb85ffc4 100644
--- a/retriever-redis-langchain/meta.yml
+++ b/retriever-redis-langchain/meta.yml
@@ -1,3 +1,2 @@
 0.3.3-redis5.1.1-oe2403lts:
-  path: retriever-redis-langchain/0.3.3-redis5.1.1/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: retriever-redis-langchain/0.3.3-redis5.1.1/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/text-embeddings-inference-cpu/1.5/24.03-lts/Dockerfile b/text-embeddings-inference-cpu/1.5/24.03-lts/Dockerfile
index 7fb6efd4f35dbdb848fc0fe838f39fb4e89e2fa2..79c5e9f53751c6cfb1da795662096ea9ea43151e 100644
--- a/text-embeddings-inference-cpu/1.5/24.03-lts/Dockerfile
+++ b/text-embeddings-inference-cpu/1.5/24.03-lts/Dockerfile
@@ -1,13 +1,12 @@
 FROM openeuler/openeuler:24.03-lts AS chef
+ARG TARGETARCH
+ARG BUILDARCH
 WORKDIR /usr/src
-
-ENV SCCACHE=0.5.4
-ENV RUSTC_WRAPPER=/usr/local/bin/sccache
-
 RUN yum update -y && \
     yum install -y \
     --setopt=install_weak_deps=False \
     openssl-devel \
+    ca-certificates \
     gcc \
     g++ \
     git
@@ -16,62 +15,46 @@ RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs -o rustup-init && \
     chmod +x rustup-init && \
     ./rustup-init -y && \
     rm rustup-init
-
 ENV PATH="/root/.cargo/bin:$PATH"
-
-# Donwload, configure sccache
-RUN curl -fsSL https://github.com/mozilla/sccache/releases/download/v$SCCACHE/sccache-v$SCCACHE-x86_64-unknown-linux-musl.tar.gz | tar -xzv --strip-components=1 -C /usr/local/bin sccache-v$SCCACHE-x86_64-unknown-linux-musl/sccache && \
-    chmod +x /usr/local/bin/sccache
-
 RUN cargo install cargo-chef --locked
 
 FROM chef AS planner
-
 RUN git clone https://github.com/huggingface/text-embeddings-inference.git
-
 WORKDIR /usr/src/text-embeddings-inference
-
 RUN cargo chef prepare --recipe-path recipe.json
 
 FROM chef AS builder
-
+RUN yum update -y && \
+    yum install -y \
+    --setopt=install_weak_deps=False \
+    ca-certificates \
+    openssl-devel
 ARG GIT_SHA
 ARG DOCKER_LABEL
-
-# sccache specific variables
 ARG ACTIONS_CACHE_URL
 ARG ACTIONS_RUNTIME_TOKEN
-ARG SCCACHE_GHA_ENABLED
-
 COPY --from=planner /usr/src/text-embeddings-inference/recipe.json recipe.json
-
-RUN cargo chef cook --release --features ort --no-default-features --recipe-path recipe.json && sccache -s
-
+RUN cargo chef cook --release --features ort --no-default-features --recipe-path recipe.json
 RUN git clone https://github.com/huggingface/text-embeddings-inference.git
 
 FROM builder AS http-builder
 WORKDIR /usr/src/text-embeddings-inference
-
-RUN cargo build --release --bin text-embeddings-router -F ort -F http --no-default-features && sccache -s
+RUN cargo build --release --bin text-embeddings-router -F ort -F http --no-default-features
 
 FROM builder AS grpc-builder
 WORKDIR /usr/src/text-embeddings-inference
-
-RUN PROTOC_ZIP=protoc-21.12-linux-x86_64.zip && \
+RUN PROTOC_ARCH=$(if [ "$TARGETARCH" = "amd64" ]; then echo "x86_64"; elif [ "$TARGETARCH" = "arm64" ]; then echo "aarch64"; fi) && \
+    PROTOC_ZIP=protoc-21.12-linux-$PROTOC_ARCH.zip && \
     curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v21.12/$PROTOC_ZIP && \
     unzip -o $PROTOC_ZIP -d /usr/local bin/protoc && \
     unzip -o $PROTOC_ZIP -d /usr/local 'include/*' && \
     rm -f $PROTOC_ZIP
-
 COPY proto proto
-
-RUN cargo build --release --bin text-embeddings-router -F grpc -F ort --no-default-features && sccache -s
+RUN cargo build --release --bin text-embeddings-router -F grpc -F ort --no-default-features
 
 FROM openeuler/openeuler:24.03-lts AS base
-
 ENV HUGGINGFACE_HUB_CACHE=/data \
     PORT=80
-
 RUN yum update -y && \
     yum install -y \
     --setopt=install_weak_deps=False \
@@ -79,24 +62,21 @@ RUN yum update -y && \
     openssl-devel
 
 FROM base AS grpc
-
-COPY --from=grpc-builder /usr/src/text-embeddings-inference/target/release/text-embeddings-router /usr/local/bin/text-embeddings-router
-
+COPY --from=grpc-builder /usr/src/text-embeddings-inference/target/release/text-embeddings-router \
+/usr/local/bin/text-embeddings-router
 ENTRYPOINT ["text-embeddings-router"]
 CMD ["--json-output"]
 
 FROM base AS http
-
-COPY --from=http-builder /usr/src/text-embeddings-inference/target/release/text-embeddings-router /usr/local/bin/text-embeddings-router
+COPY --from=http-builder /usr/src/text-embeddings-inference/target/release/text-embeddings-router \
+/usr/local/bin/text-embeddings-router
 
 # Amazon SageMaker compatible image
 FROM http AS sagemaker
 COPY --chmod=775 sagemaker-entrypoint.sh entrypoint.sh
-
 ENTRYPOINT ["./entrypoint.sh"]
 
 # Default image
 FROM http
-
 ENTRYPOINT ["text-embeddings-router"]
 CMD ["--json-output"]
diff --git a/text-embeddings-inference-cpu/meta.yml b/text-embeddings-inference-cpu/meta.yml
index fa1a239ed9410c9271a703e02abaf250c52eff3f..0677c9098f9a00143ec1e49373366c17c93c38f8 100644
--- a/text-embeddings-inference-cpu/meta.yml
+++ b/text-embeddings-inference-cpu/meta.yml
@@ -1,3 +1,2 @@
 1.5-oe2403lts:
-  path: text-embeddings-inference-cpu/1.5/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  path: text-embeddings-inference-cpu/1.5/24.03-lts/Dockerfile
\ No newline at end of file
diff --git a/vllm/0.6.3-cann8.0.rc3.alpha002-910b-py3.9/22.03-lts/Dockerfile b/vllm/0.6.3-cann8.0.rc3.alpha002-910b-py3.9/22.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..8b15ddf08064c43d0042dfaae4a13067334fe762
--- /dev/null
+++ b/vllm/0.6.3-cann8.0.rc3.alpha002-910b-py3.9/22.03-lts/Dockerfile
@@ -0,0 +1,11 @@
+FROM ascendai/cann:8.0.rc3.alpha002-910b-openeuler22.03-py3.9
+RUN dnf update -y && \
+    dnf install -y python3-pip git
+WORKDIR /workspace
+RUN git clone -b npu_support --depth 1 https://github.com/GuangJie1/vllm.git
+# install build requirements
+RUN PIP_EXTRA_INDEX_URL="https://download.pytorch.org/whl/cpu" python3 -m pip install -r /workspace/vllm/requirements-build.txt
+# build vLLM with NPU backend
+RUN PIP_EXTRA_INDEX_URL="https://download.pytorch.org/whl/cpu" VLLM_TARGET_DEVICE="npu" python3 -m pip install /workspace/vllm/
+ENV HF_ENDPOINT=https://hf-mirror.com
+ENTRYPOINT ["bash", "-c", "source /root/.bashrc && exec python3 -m vllm.entrypoints.openai.api_server \"$@\"", "--"]
\ No newline at end of file
diff --git a/vllm/meta.yml b/vllm/meta.yml
index f7f8162028e7d34ed1f626018229803967a73bde..af53a6a7278252bba652f7b8816aa48d695805e2 100644
--- a/vllm/meta.yml
+++ b/vllm/meta.yml
@@ -1,3 +1,5 @@
 0.6.3-oe2403lts:
   path: vllm/0.6.3/24.03-lts/Dockerfile
-  arch: x86_64
\ No newline at end of file
+  arch: x86_64
+0.6.3-cann8.0.rc3.alpha002-910b-py3.9-oe2203lts:
+  path: vllm/0.6.3-cann8.0.rc3.alpha002-910b-py3.9/22.03-lts/Dockerfile
\ No newline at end of file