diff --git a/AI/opea/dataprep-redis/1.2/24.03-lts/Dockerfile b/AI/opea/dataprep-redis/1.2/24.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..a4efa7bc78519676e5935959dda7b6fcaf4c0c32
--- /dev/null
+++ b/AI/opea/dataprep-redis/1.2/24.03-lts/Dockerfile
@@ -0,0 +1,74 @@
+# docker build -t openeuler_opea/dataprep:yunfei .
+# Base image
+FROM openeuler/openeuler:24.03-lts
+
+# Install Python 3.11
+RUN yum update -y && \
+    yum install -y \
+    python3.11 \
+    python3-devel \
+    python3-pip \
+    git \
+    wget && \
+    yum clean all
+
+ENV LANG=C.UTF-8
+ARG ARCH="cpu"
+
+# Install service dependencies
+RUN yum update -y && yum install -y \
+    gcc \
+    gcc-c++ \
+    java-1.8.0-openjdk \
+    cairo \
+    mesa-libGL \
+    jemalloc-devel \
+    postgresql-devel \
+    poppler-utils \
+    tesseract \
+    curl && \
+    ln -s /usr/bin/python3.11 /usr/bin/python && \
+    yum clean all
+
+# Install LibreOffice
+RUN wget https://mirrors.bfsu.edu.cn/libreoffice/libreoffice/stable/24.8.6/rpm/x86_64/LibreOffice_24.8.6_Linux_x86-64_rpm.tar.gz && \
+    tar -xvf LibreOffice_24.8.6_Linux_x86-64_rpm.tar.gz && \
+    yum install -y ./LibreOffice_24.8.6.2_Linux_x86-64_rpm/RPMS/*.rpm && \
+    cd / && rm -rf LibreOffice_24.8.6.2_Linux_x86-64_rpm* && rm LibreOffice_24.8.6_Linux_x86-64_rpm.tar.gz
+
+# Install ffmpeg static build
+RUN yum install -y xz && \
+    cd /root && wget https://johnvansickle.com/ffmpeg/builds/ffmpeg-git-amd64-static.tar.xz && \
+    mkdir ffmpeg-git-amd64-static && tar -xvf ffmpeg-git-amd64-static.tar.xz -C ffmpeg-git-amd64-static --strip-components 1 && \
+    export PATH=/root/ffmpeg-git-amd64-static:$PATH && \
+    cp /root/ffmpeg-git-amd64-static/ffmpeg /usr/local/bin/
+
+
+RUN useradd -m -s /bin/bash user && \
+    mkdir -p /home/user && \
+    chown -R user /home/user/
+
+# COPY comps /home/user/comps
+RUN git clone --depth=1 --branch v1.2 https://github.com/opea-project/GenAIComps.git && \
+    cp -r GenAIComps/comps /home/user/comps && \
+    rm -rf GenAIComps
+
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    if [ ${ARCH} = "cpu" ]; then \
+        PIP_EXTRA_INDEX_URL="--extra-index-url https://download.pytorch.org/whl/cpu"; \
+    else \
+        PIP_EXTRA_INDEX_URL=""; \
+    fi && \
+    pip install --no-cache-dir torch torchvision ${PIP_EXTRA_INDEX_URL} && \
+    pip install --no-cache-dir ${PIP_EXTRA_INDEX_URL} -r /home/user/comps/dataprep/src/requirements.txt && \
+    pip install opentelemetry-api==1.29.0 opentelemetry-exporter-otlp==1.29.0 opentelemetry-sdk==1.29.0
+
+ENV PYTHONPATH=/usr/bin/python3:/home/user
+
+RUN mkdir -p /home/user/comps/dataprep/src/uploaded_files && chown -R user /home/user/comps/dataprep/src/uploaded_files
+
+USER user
+
+WORKDIR /home/user/comps/dataprep/src
+
+ENTRYPOINT ["sh", "-c", "python $( [ \"$MULTIMODAL_DATAPREP\" = \"true\" ] && echo 'opea_dataprep_multimodal_microservice.py' || echo 'opea_dataprep_microservice.py')"]
diff --git a/AI/opea/dataprep-redis/meta.yml b/AI/opea/dataprep-redis/meta.yml
index 55f9e33c3012e71d7d18b43ba328c1fc4595cd5f..8aff098c5afd101c200bfc2fffd2b500f6765787 100644
--- a/AI/opea/dataprep-redis/meta.yml
+++ b/AI/opea/dataprep-redis/meta.yml
@@ -1,3 +1,6 @@
 1.0-oe2403lts:
   path: 1.0/24.03-lts/Dockerfile
   arch: x86_64
+1.2-oe2403lts:
+  path: 1.2/24.03-lts/Dockerfile
+  arch: x86_64
\ No newline at end of file
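For reference, a quick way to exercise the dataprep image above. The tag, the Redis endpoint, and the REDIS_URL variable are illustrative assumptions; only the MULTIMODAL_DATAPREP switch comes from the ENTRYPOINT in this Dockerfile:

    docker build -t openeuler_opea/dataprep:1.2-oe2403lts -f AI/opea/dataprep-redis/1.2/24.03-lts/Dockerfile .
    # default text dataprep microservice
    docker run --rm -e REDIS_URL=redis://<redis-host>:6379 openeuler_opea/dataprep:1.2-oe2403lts
    # select the multimodal variant via the ENTRYPOINT switch
    docker run --rm -e MULTIMODAL_DATAPREP=true -e REDIS_URL=redis://<redis-host>:6379 openeuler_opea/dataprep:1.2-oe2403lts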
diff --git a/AI/opea/doc-index-retriever/1.2/24.03-lts/Dockerfile b/AI/opea/doc-index-retriever/1.2/24.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..78621fc7884d33a721a75c6b43f017c892c6aef0
--- /dev/null
+++ b/AI/opea/doc-index-retriever/1.2/24.03-lts/Dockerfile
@@ -0,0 +1,56 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+
+# Stage 1: base setup used by other stages
+FROM openeuler/openeuler:24.03-lts AS base
+
+RUN yum update -y && \
+    yum install -y --setopt=install_weak_deps=False \
+    python-pip python \
+    shadow-utils \
+    mesa-libGL \
+    jemalloc-devel \
+    git
+
+
+ENV HOME=/home/user
+
+RUN useradd -m -s /bin/bash user && \
+    mkdir -p $HOME && \
+    chown -R user $HOME
+
+WORKDIR $HOME
+
+
+# Stage 2: GenAIComps v1.2 sources
+FROM base AS git
+RUN git clone --depth=1 --branch v1.2 https://github.com/opea-project/GenAIComps.git
+
+# Stage 3: common layer shared by services using GenAIComps
+FROM base AS comps-base
+
+# copy just relevant parts
+COPY --from=git $HOME/GenAIComps/comps $HOME/GenAIComps/comps
+COPY --from=git $HOME/GenAIComps/*.* $HOME/GenAIComps/LICENSE $HOME/GenAIComps/
+
+WORKDIR $HOME/GenAIComps
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    pip install --no-cache-dir -r $HOME/GenAIComps/requirements.txt
+WORKDIR $HOME
+
+ENV PYTHONPATH=$PYTHONPATH:/home/user/GenAIComps
+
+
+USER user
+
+
+# Stage 4: unique part
+FROM comps-base
+
+RUN git clone --depth=1 --branch v1.2 https://github.com/opea-project/GenAIExamples.git && \
+    cp GenAIExamples/DocIndexRetriever/retrieval_tool.py . && \
+    rm -rf GenAIExamples
+
+
+ENTRYPOINT ["python", "retrieval_tool.py"]
diff --git a/AI/opea/doc-index-retriever/meta.yml b/AI/opea/doc-index-retriever/meta.yml
index 4cae4dd4a3ff05f2037aaa54695b1cc970e797f2..8aff098c5afd101c200bfc2fffd2b500f6765787 100644
--- a/AI/opea/doc-index-retriever/meta.yml
+++ b/AI/opea/doc-index-retriever/meta.yml
@@ -1,3 +1,6 @@
 1.0-oe2403lts:
   path: 1.0/24.03-lts/Dockerfile
+  arch: x86_64
+1.2-oe2403lts:
+  path: 1.2/24.03-lts/Dockerfile
   arch: x86_64
\ No newline at end of file
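A build sketch for the multi-stage image above; the tag is a placeholder, and the runtime configuration of the upstream embedding/retriever/reranking endpoints (done through environment variables documented in GenAIExamples) is not shown here:

    docker build -t openeuler_opea/doc-index-retriever:1.2-oe2403lts \
        -f AI/opea/doc-index-retriever/1.2/24.03-lts/Dockerfile .
    # the intermediate "git" stage is discarded at build time; only comps-base
    # plus retrieval_tool.py ends up in the final image
    docker run --rm openeuler_opea/doc-index-retriever:1.2-oe2403lts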
diff --git a/AI/opea/embedding-tei/1.2/24.03-lts/Dockerfile b/AI/opea/embedding-tei/1.2/24.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..dab4ece862fdf82068ad99edab640312ff9f31bd
--- /dev/null
+++ b/AI/opea/embedding-tei/1.2/24.03-lts/Dockerfile
@@ -0,0 +1,32 @@
+# docker build -t openeuler_opea/embedding:yunfei .
+
+
+# Base image
+FROM openeuler/openeuler:24.03-lts
+
+# Install Python 3.11
+RUN yum update -y && \
+    yum install -y \
+    python3.11 \
+    python3-devel \
+    python3-pip \
+    git \
+    wget && \
+    ln -s /usr/bin/python3.11 /usr/bin/python && \
+    yum clean all
+
+RUN git clone --depth=1 --branch v1.2 https://github.com/opea-project/GenAIComps.git && \
+    cp -r GenAIComps/comps /home/comps && \
+    rm -rf GenAIComps
+
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    pip install --no-cache-dir -r /home/comps/embeddings/src/requirements.txt
+
+ENV PYTHONPATH=/usr/bin/python:/home
+
+WORKDIR /home/comps/embeddings/src/
+
+ENV MULTIMODAL_EMBEDDING="false"
+
+ENTRYPOINT ["sh", "-c", "python $( [ \"$MULTIMODAL_EMBEDDING\" = \"true\" ] && echo 'opea_multimodal_embedding_microservice.py' || echo 'opea_embedding_microservice.py')"]
+
diff --git a/AI/opea/embedding-tei/meta.yml b/AI/opea/embedding-tei/meta.yml
index 55f9e33c3012e71d7d18b43ba328c1fc4595cd5f..8aff098c5afd101c200bfc2fffd2b500f6765787 100644
--- a/AI/opea/embedding-tei/meta.yml
+++ b/AI/opea/embedding-tei/meta.yml
@@ -1,3 +1,6 @@
 1.0-oe2403lts:
   path: 1.0/24.03-lts/Dockerfile
   arch: x86_64
+1.2-oe2403lts:
+  path: 1.2/24.03-lts/Dockerfile
+  arch: x86_64
\ No newline at end of file
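A hedged usage sketch for the embedding image; the tag, the TEI endpoint, and the TEI_EMBEDDING_ENDPOINT variable are assumptions, while MULTIMODAL_EMBEDDING is the switch defined in the Dockerfile above:

    docker build -t openeuler_opea/embedding:1.2-oe2403lts -f AI/opea/embedding-tei/1.2/24.03-lts/Dockerfile .
    # default text-embedding microservice (MULTIMODAL_EMBEDDING defaults to "false")
    docker run --rm -e TEI_EMBEDDING_ENDPOINT=http://<tei-host>:80 openeuler_opea/embedding:1.2-oe2403lts
    # multimodal embedding variant
    docker run --rm -e MULTIMODAL_EMBEDDING=true openeuler_opea/embedding:1.2-oe2403lts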
diff --git a/AI/opea/reranking-tei/1.2/24.03-lts/Dockerfile b/AI/opea/reranking-tei/1.2/24.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..ca97a2eedb95f8b8114e2b24e88766e98a1bfa6f
--- /dev/null
+++ b/AI/opea/reranking-tei/1.2/24.03-lts/Dockerfile
@@ -0,0 +1,58 @@
+# docker build -t openeuler_opea/rerankings:yunfei .
+
+
+# Base image
+FROM openeuler/openeuler:24.03-lts
+ENV LANG=C.UTF-8
+ARG ARCH="cpu"
+ARG SERVICE="all"
+
+# Install Python 3.11
+RUN yum update -y && \
+    yum install -y \
+    python3.11 \
+    python3-devel \
+    python3-pip \
+    git \
+    wget && \
+    ln -s /usr/bin/python3.11 /usr/bin/python && \
+    yum clean all
+
+RUN yum update -y && yum install -y \
+    libglvnd-glx \
+    jemalloc-devel && \
+    yum clean all
+
+RUN useradd -m -s /bin/bash user && \
+    mkdir -p /home/user && \
+    chown -R user /home/user/
+
+RUN git clone --depth=1 --branch v1.2 https://github.com/opea-project/GenAIComps.git && \
+    cp -r GenAIComps/comps /home/user/comps && \
+    rm -rf GenAIComps
+
+USER user
+
+RUN if [ ${ARCH} = "cpu" ]; then \
+        pip install --no-cache-dir torch --index-url https://download.pytorch.org/whl/cpu; \
+    fi && \
+    if [ ${SERVICE} = "videoqna" ]; then \
+        pip install --no-cache-dir --upgrade pip setuptools && \
+        pip install --no-cache-dir -r /home/user/comps/rerankings/src/requirements_videoqna.txt; \
+    elif [ ${SERVICE} = "all" ]; then \
+        git clone https://github.com/IntelLabs/fastRAG.git /home/user/fastRAG && \
+        cd /home/user/fastRAG && \
+        pip install --no-cache-dir --upgrade pip && \
+        pip install --no-cache-dir . && \
+        pip install --no-cache-dir .[intel] && \
+        pip install --no-cache-dir -r /home/user/comps/rerankings/src/requirements_videoqna.txt; \
+    fi && \
+    pip install --no-cache-dir --upgrade pip setuptools && \
+    pip install --no-cache-dir -r /home/user/comps/rerankings/src/requirements.txt
+
+
+ENV PYTHONPATH=/usr/bin/python3:/home/user
+
+WORKDIR /home/user/comps/rerankings/src
+
+ENTRYPOINT ["python", "opea_reranking_microservice.py"]
diff --git a/AI/opea/reranking-tei/meta.yml b/AI/opea/reranking-tei/meta.yml
index 55f9e33c3012e71d7d18b43ba328c1fc4595cd5f..8aff098c5afd101c200bfc2fffd2b500f6765787 100644
--- a/AI/opea/reranking-tei/meta.yml
+++ b/AI/opea/reranking-tei/meta.yml
@@ -1,3 +1,6 @@
 1.0-oe2403lts:
   path: 1.0/24.03-lts/Dockerfile
   arch: x86_64
+1.2-oe2403lts:
+  path: 1.2/24.03-lts/Dockerfile
+  arch: x86_64
\ No newline at end of file
diff --git a/AI/opea/retriever-redis/1.2/24.03-lts/Dockerfile b/AI/opea/retriever-redis/1.2/24.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..a980c55a0cd248feea78800c06569bcc48af25ae
--- /dev/null
+++ b/AI/opea/retriever-redis/1.2/24.03-lts/Dockerfile
@@ -0,0 +1,60 @@
+# docker build -t openeuler_opea/retrievers:yunfei .
+
+FROM openeuler/openeuler:24.03-lts
+
+ARG ARCH="cpu"
+
+# RUN apt-get update -y
+# RUN apt-get install -y --no-install-recommends --fix-missing \
+#     libcairo2 \
+#     libgl1-mesa-glx \
+#     libglib2.0-0 \
+#     libjemalloc-dev
+RUN yum update -y && \
+    yum install -y --setopt=install_weak_deps=False \
+    shadow-utils \
+    git \
+    wget \
+    python3.11 \
+    python3-devel \
+    python3-pip \
+    cairo \
+    mesa-libGL \
+    glib2 \
+    vim \
+    jemalloc-devel \
+    gcc-c++ \
+    gcc \
+    make && \
+    ln -s /usr/bin/python3.11 /usr/bin/python && \
+    yum clean all
+
+RUN useradd -m -s /bin/bash user && \
+    mkdir -p /home/user && \
+    chown -R user /home/user/
+
+RUN git clone --depth=1 --branch v1.2 https://github.com/opea-project/GenAIComps.git && \
+    cp -r GenAIComps/comps /home/user/comps && \
+    rm -rf GenAIComps
+
+
+USER user
+
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    if [ ${ARCH} = "cpu" ]; then \
+        PIP_EXTRA_INDEX_URL="--extra-index-url https://download.pytorch.org/whl/cpu"; \
+    else \
+        PIP_EXTRA_INDEX_URL=""; \
+    fi && \
+    pip install --no-cache-dir torch torchvision ${PIP_EXTRA_INDEX_URL} && \
+    pip install --no-cache-dir ${PIP_EXTRA_INDEX_URL} -r /home/user/comps/retrievers/src/requirements.txt && \
+    pip install opentelemetry-api==1.27.0 opentelemetry-exporter-otlp==1.27.0 opentelemetry-sdk==1.27.0
+
+RUN pip install --no-cache-dir protobuf && \
+    pip3 install --no-cache-dir pymilvus==2.5.0
+
+ENV PYTHONPATH=/usr/bin/python:/home/user
+
+WORKDIR /home/user/comps/retrievers/src
+
+ENTRYPOINT ["python", "opea_retrievers_microservice.py"]
diff --git a/AI/opea/retriever-redis/meta.yml b/AI/opea/retriever-redis/meta.yml
index 55f9e33c3012e71d7d18b43ba328c1fc4595cd5f..8aff098c5afd101c200bfc2fffd2b500f6765787 100644
--- a/AI/opea/retriever-redis/meta.yml
+++ b/AI/opea/retriever-redis/meta.yml
@@ -1,3 +1,6 @@
 1.0-oe2403lts:
   path: 1.0/24.03-lts/Dockerfile
   arch: x86_64
+1.2-oe2403lts:
+  path: 1.2/24.03-lts/Dockerfile
+  arch: x86_64
\ No newline at end of file
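For the last two images, the build args select the dependency set that gets installed; a minimal sketch, assuming the build runs from the repository root (image tags are placeholders):

    # reranking: default SERVICE=all pulls in fastRAG on top of the common requirements
    docker build -t openeuler_opea/rerankings:1.2-oe2403lts -f AI/opea/reranking-tei/1.2/24.03-lts/Dockerfile .
    # reranking: videoqna-only dependency set
    docker build --build-arg SERVICE=videoqna -t openeuler_opea/rerankings:videoqna -f AI/opea/reranking-tei/1.2/24.03-lts/Dockerfile .
    # retriever: any ARCH other than "cpu" skips the CPU-only PyTorch extra index
    docker build --build-arg ARCH=gpu -t openeuler_opea/retrievers:1.2-oe2403lts -f AI/opea/retriever-redis/1.2/24.03-lts/Dockerfile .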