diff --git a/AI/opea/chatqna-conversation-ui/1.3/24.03-lts/Dockerfile b/AI/opea/chatqna-conversation-ui/1.3/24.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..cf73dfa27a8a55f6352f6dcaff675b08761d79da
--- /dev/null
+++ b/AI/opea/chatqna-conversation-ui/1.3/24.03-lts/Dockerfile
@@ -0,0 +1,33 @@
+ARG BASE=openeuler/node:20.11.1-oe2403lts
+ARG VERSION=v1.3
+
+# Use node 20.11.1 as the base image
+FROM $BASE as vite-app
+
+ARG VERSION
+
+RUN yum update -y && \
+    yum install -y \
+    git && \
+    yum clean all
+
+ARG GENAIEXAMPLES_REPO=https://github.com/opea-project/GenAIExamples.git
+RUN git clone -b $VERSION $GENAIEXAMPLES_REPO && \
+    mkdir -p /usr/app && \
+    cp -r GenAIExamples/ChatQnA/ui/react /usr/app/react && \
+    rm -rf GenAIExamples
+
+WORKDIR /usr/app/react
+
+
+RUN ["npm", "install"]
+RUN ["npm", "run", "build"]
+
+
+FROM openeuler/nginx:1.29.0-oe2403lts
+
+COPY --from=vite-app /usr/app/react/dist /usr/share/nginx/html
+COPY --from=vite-app /usr/app/react/env.sh /docker-entrypoint.d/env.sh
+
+COPY --from=vite-app /usr/app/react/nginx.conf /etc/nginx/conf.d/default.conf
+RUN chmod +x /docker-entrypoint.d/env.sh
\ No newline at end of file
diff --git a/AI/opea/chatqna-conversation-ui/meta.yml b/AI/opea/chatqna-conversation-ui/meta.yml
index 55f9e33c3012e71d7d18b43ba328c1fc4595cd5f..ec64504554f22633aa0d996b507f6057de243476 100644
--- a/AI/opea/chatqna-conversation-ui/meta.yml
+++ b/AI/opea/chatqna-conversation-ui/meta.yml
@@ -1,3 +1,11 @@
 1.0-oe2403lts:
   path: 1.0/24.03-lts/Dockerfile
   arch: x86_64
+
+1.2-oe2403lts:
+  path: 1.2/24.03-lts/Dockerfile
+  arch: x86_64
+
+1.3-oe2403lts:
+  path: 1.3/24.03-lts/Dockerfile
+  arch: x86_64
\ No newline at end of file
diff --git a/AI/opea/chatqna-ui/1.3/24.03-lts/Dockerfile b/AI/opea/chatqna-ui/1.3/24.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..0bbb6f1880e78c746e070f386b7385f45b654de4
--- /dev/null
+++ b/AI/opea/chatqna-ui/1.3/24.03-lts/Dockerfile
@@ -0,0 +1,32 @@
+ARG VERSION=v1.3
+ARG BASE=openeuler/node:20.11.1-oe2403lts
+# Use node 20.11.1 as the base image
+FROM $BASE
+ARG VERSION
+# Update package manager and install Git
+RUN yum update -y && \
+    yum install -y git && \
+    yum clean all && \
+    rm -rf /var/cache/yum
+
+# Copy the front-end code repository
+ARG GENAIEXAMPLES_REPO=https://github.com/opea-project/GenAIExamples.git
+RUN git clone -b $VERSION $GENAIEXAMPLES_REPO && \
+    mkdir -p /home/user && \
+    cp -r GenAIExamples/ChatQnA/ui/svelte /home/user/svelte && \
+    rm -rf GenAIExamples
+
+# Set the working directory
+WORKDIR /home/user/svelte
+
+# Install front-end dependencies
+RUN npm install
+
+# Build the front-end application
+RUN npm run build
+
+# Expose the port of the front-end application
+EXPOSE 5173
+
+# Run the front-end application in preview mode
+CMD ["npm", "run", "preview", "--", "--port", "5173", "--host", "0.0.0.0"]
\ No newline at end of file
diff --git a/AI/opea/chatqna-ui/meta.yml b/AI/opea/chatqna-ui/meta.yml
index 0e6dfdc00ec532336b9942f33ee6565fe22109d0..ec64504554f22633aa0d996b507f6057de243476 100644
--- a/AI/opea/chatqna-ui/meta.yml
+++ b/AI/opea/chatqna-ui/meta.yml
@@ -4,4 +4,8 @@
 
 1.2-oe2403lts:
   path: 1.2/24.03-lts/Dockerfile
+  arch: x86_64
+
+1.3-oe2403lts:
+  path: 1.3/24.03-lts/Dockerfile
   arch: x86_64
\ No newline at end of file
diff --git a/AI/opea/chatqna/1.3/24.03-lts/Dockerfile b/AI/opea/chatqna/1.3/24.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..1e73a669d0efe5638046c8f953cfadc3f8a37ea1
--- /dev/null
+++ b/AI/opea/chatqna/1.3/24.03-lts/Dockerfile
@@ -0,0 +1,13 @@
+ARG IMAGE_REPO=openeuler
+ARG BASE_TAG=1.3
+FROM $IMAGE_REPO/comps-base:$BASE_TAG-oe2403lts
+
+ARG BASE_TAG
+
+ARG GENAIEXAMPLES_REPO=https://github.com/opea-project/GenAIExamples.git
+RUN git clone -b v$BASE_TAG $GENAIEXAMPLES_REPO && \
+    cp -r GenAIExamples/ChatQnA/chatqna.py $HOME/chatqna.py && \
+    cp -r GenAIExamples/ChatQnA/entrypoint.sh $HOME/entrypoint.sh && \
+    rm -rf GenAIExamples
+
+ENTRYPOINT ["bash", "entrypoint.sh"]
\ No newline at end of file
diff --git a/AI/opea/chatqna/meta.yml b/AI/opea/chatqna/meta.yml
index 0343efe1949d963f2ec255875c881c6161802a33..05b30cd35b1180e5895f9bf894bd9446571d0afb 100644
--- a/AI/opea/chatqna/meta.yml
+++ b/AI/opea/chatqna/meta.yml
@@ -5,3 +5,7 @@
 1.2-oe2403lts:
   path: 1.2/24.03-lts/Dockerfile
   arch: x86_64
+
+1.3-oe2403lts:
+  path: 1.3/24.03-lts/Dockerfile
+  arch: x86_64
diff --git a/AI/opea/dataprep/1.3/24.03-lts/Dockerfile b/AI/opea/dataprep/1.3/24.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..18f21097cb3858d02d69f82342603e41d2e44e28
--- /dev/null
+++ b/AI/opea/dataprep/1.3/24.03-lts/Dockerfile
@@ -0,0 +1,75 @@
+# Copyright (C) 2025 Huawei Technologies Co., Ltd.
+# SPDX-License-Identifier: Apache-2.0
+ARG BASE=openeuler/python:3.11.13-oe2403lts
+ARG VERSION=v1.3
+
+FROM $BASE
+
+ARG VERSION
+
+ENV LANG=C.UTF-8
+
+ARG ARCH="cpu"
+
+RUN yum update -y && yum install -y \
+    gcc g++ make cmake \
+    java-1.8.0-openjdk \
+    cairo \
+    libxslt \
+    mesa-libGL \
+    jemalloc-devel \
+    mariadb-connector-c-devel-3.3.8 \
+    libpq-devel \
+    poppler-utils \
+    tesseract-tools \
+    ffmpeg \
+    wget curl \
+    shadow \
+    git && \
+    yum clean all && \
+    rm -rf /var/cache/yum
+
+ENV TESSDATA_PREFIX=/usr/share/tesseract/tessdata
+
+RUN LIBREOFFICE_URL=https://mirrors.tuna.tsinghua.edu.cn/libreoffice/libreoffice/stable/25.2.4/rpm/x86_64/LibreOffice_25.2.4_Linux_x86-64_rpm.tar.gz && \
+    wget $LIBREOFFICE_URL -O /tmp/libreOffice.tar.gz && \
+    tar --strip-components=1 -xvf /tmp/libreOffice.tar.gz -C /tmp && \
+    yum install -y /tmp/RPMS/*.rpm && \
+    yum clean all && \
+    rm -fr /tmp/libreOffice.tar.gz /tmp/RPMS && \
+    ln -sf /usr/bin/libreoffice25.2 /usr/bin/libreoffice
+
+RUN useradd -m -s /bin/bash user && \
+    mkdir -p /home/user && \
+    chown -R user /home/user/
+
+ARG GENAICOMPS_REPO=https://github.com/opea-project/GenAIComps.git
+RUN git clone -b $VERSION $GENAICOMPS_REPO && \
+    cp -r GenAIComps/comps /home/user/comps && \
+    rm -rf GenAIComps
+
+ARG uvpip='uv pip install --system --no-cache-dir'
+RUN pip install --no-cache-dir --upgrade pip setuptools uv && \
+    if [ ${ARCH} = "cpu" ]; then \
+      PIP_EXTRA_INDEX_URL="--extra-index-url https://download.pytorch.org/whl/cpu"; \
+    else \
+      PIP_EXTRA_INDEX_URL=""; \
+    fi && \
+    $uvpip torch torchvision ${PIP_EXTRA_INDEX_URL} && \
+    $uvpip -r /home/user/comps/dataprep/src/requirements.txt
+
+ENV PYTHONPATH=$PYTHONPATH:/home/user
+
+RUN mkdir -p /home/user/comps/dataprep/src/uploaded_files && chown -R user /home/user/comps/dataprep/src/uploaded_files
+RUN mkdir -p /data && chown -R user /data
+
+USER user
+ENV NLTK_DATA=/home/user/nltk_data
+# air gapped support: predownload all needed nltk data
+RUN mkdir -p /home/user/nltk_data && python -m nltk.downloader -d /home/user/nltk_data punkt_tab averaged_perceptron_tagger_eng stopwords
+# air gapped support: set model cache dir
+ENV HF_HUB_CACHE=/data
+
+WORKDIR /home/user/comps/dataprep/src
+
+ENTRYPOINT ["sh", "-c", "python $( [ \"$MULTIMODAL_DATAPREP\" = \"true\" ] && echo 'opea_dataprep_multimodal_microservice.py' || echo 'opea_dataprep_microservice.py')"]
\ No newline at end of file
diff --git a/AI/opea/dataprep/meta.yml b/AI/opea/dataprep/meta.yml
index 2d855cefca21cfe09a49183738c014e3571c06a7..2ab4c3bec8ff02a3857cd9799cb1a5c043a62587 100644
--- a/AI/opea/dataprep/meta.yml
+++ b/AI/opea/dataprep/meta.yml
@@ -1,3 +1,7 @@
 1.2-oe2403lts:
   path: 1.2/24.03-lts/Dockerfile
   arch: x86_64
+
+1.3-oe2403lts:
+  path: 1.3/24.03-lts/Dockerfile
+  arch: x86_64
\ No newline at end of file