diff --git a/AI/image-list.yml b/AI/image-list.yml
index c2f6ddaa742217f21df43cced22c18315b962de7..373fef19595217651287d27514f59cb4ea9cddf5 100644
--- a/AI/image-list.yml
+++ b/AI/image-list.yml
@@ -1,9 +1,5 @@
 images:
-  multimodalqna: opea/multimodalqna
-  multimodalqna-ui: opea/multimodalqna-ui
-  lvm-llava: opea/lvm-llava
-  embedding-multimodal-bridgetower: opea/embedding-multimodal-bridgetower
-  comps-base: opea/comps-base
+  codegen-gradio-ui: opea/codegen-gradio-ui
   kserve-huggingfaceserver: kserve/huggingfaceserver
   kserve-controller: kserve/controller
   kserve-agent: kserve/agent
diff --git a/AI/opea/codegen-gradio-ui/1.3/24.03-lts/Dockerfile b/AI/opea/codegen-gradio-ui/1.3/24.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..d229a0bac90fe75e5e10ff3eaba8b081c8b0f249
--- /dev/null
+++ b/AI/opea/codegen-gradio-ui/1.3/24.03-lts/Dockerfile
@@ -0,0 +1,40 @@
+ARG BASE=openeuler/python:3.11.13-oe2403lts
+ARG VERSION=v1.3
+
+FROM $BASE
+
+ENV LANG=C.UTF-8
+
+ARG ARCH="cpu"
+
+RUN yum update -y && \
+    yum install -y \
+    gcc g++ make cmake \
+    java-21-openjdk \
+    mesa-libGL \
+    jemalloc-devel \
+    git \
+    wget \
+    xz && \
+    yum clean all
+
+# Install ffmpeg static build
+WORKDIR /root
+RUN wget https://johnvansickle.com/ffmpeg/builds/ffmpeg-git-amd64-static.tar.xz && \
+    mkdir ffmpeg-git-amd64-static && tar -xvf ffmpeg-git-amd64-static.tar.xz -C ffmpeg-git-amd64-static --strip-components 1 && \
+    export PATH=/root/ffmpeg-git-amd64-static:$PATH && \
+    cp /root/ffmpeg-git-amd64-static/ffmpeg /usr/local/bin/ && \
+    cp /root/ffmpeg-git-amd64-static/ffprobe /usr/local/bin/
+
+RUN mkdir -p /home/user
+
+ARG VERSION
+ARG GENAIEXAMPLES_REPO=https://github.com/opea-project/GenAIExamples.git
+RUN git clone -b $VERSION $GENAIEXAMPLES_REPO && \
+    cp -r GenAIExamples/CodeGen/ui/gradio /home/user/gradio
+
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+pip install --no-cache-dir -r /home/user/gradio/requirements.txt
+
+WORKDIR /home/user/gradio
+ENTRYPOINT ["python", "codegen_ui_gradio.py"]
\ No newline at end of file
diff --git a/AI/opea/codegen-gradio-ui/meta.yml b/AI/opea/codegen-gradio-ui/meta.yml
new file mode 100644
index 0000000000000000000000000000000000000000..19cc377136856d9ab6d7cbd371393ba7187c0c10
--- /dev/null
+++ b/AI/opea/codegen-gradio-ui/meta.yml
@@ -0,0 +1,3 @@
+1.3-oe2403lts:
+  path: 1.3/24.03-lts/Dockerfile
+  arch: x86_64
\ No newline at end of file
diff --git a/AI/opea/codegen/1.3/24.03-lts/Dockerfile b/AI/opea/codegen/1.3/24.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..bf91664d87b6b233461b17964f0c669ad6f8a33d
--- /dev/null
+++ b/AI/opea/codegen/1.3/24.03-lts/Dockerfile
@@ -0,0 +1,14 @@
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+ARG BASE_TAG=1.3
+FROM openeuler/comps-base:$BASE_TAG-oe2403lts
+
+ARG BASE_TAG
+ARG GENAIEXAMPLES_REPO=https://github.com/opea-project/GenAIExamples.git
+RUN git clone -b v$BASE_TAG $GENAIEXAMPLES_REPO && \
+    cp -r GenAIExamples/CodeGen/codegen.py $HOME/codegen.py && \
+    rm -rf GenAIExamples
+
+
+ENTRYPOINT ["python", "codegen.py"]
\ No newline at end of file
diff --git a/AI/opea/codegen/README.md b/AI/opea/codegen/README.md
index f0b2d8bfd6c0bdb80f39a5f3e2ea7904cb817e2f..c601d191a81224841b079809964506d1a17fa5ba 100644
--- a/AI/opea/codegen/README.md
+++ b/AI/opea/codegen/README.md
@@ -30,6 +30,7 @@ The tag of each CodeGen docker image is consist of the version of CodeGen and th
 |--|--|--|
 |[1.0-oe2403lts](https://gitee.com/openeuler/openeuler-docker-images/blob/master/AI/opea/codegen/1.0/24.03-lts/Dockerfile)| CodeGen 1.0 on openEuler 24.03-LTS | amd64 |
 |[1.2-oe2403lts](https://gitee.com/openeuler/openeuler-docker-images/blob/master/AI/opea/codegen/1.2/24.03-lts/Dockerfile)| CodeGen 1.2 on openEuler 24.03-LTS | amd64 |
+|[1.3-oe2403lts](https://gitee.com/openeuler/openeuler-docker-images/blob/master/AI/opea/codegen/1.3/24.03-lts/Dockerfile)| CodeGen 1.3 on openEuler 24.03-LTS | amd64 |
 
 # Usage
 
diff --git a/AI/opea/codegen/meta.yml b/AI/opea/codegen/meta.yml
index 0e6dfdc00ec532336b9942f33ee6565fe22109d0..ec64504554f22633aa0d996b507f6057de243476 100644
--- a/AI/opea/codegen/meta.yml
+++ b/AI/opea/codegen/meta.yml
@@ -4,4 +4,8 @@
 
 1.2-oe2403lts:
   path: 1.2/24.03-lts/Dockerfile
+  arch: x86_64
+
+1.3-oe2403lts:
+  path: 1.3/24.03-lts/Dockerfile
   arch: x86_64
\ No newline at end of file
diff --git a/AI/opea/llm-textgen/1.3/24.03-lts/Dockerfile b/AI/opea/llm-textgen/1.3/24.03-lts/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..fa42a6df5d5176694b1e89da5760afc5918f81da
--- /dev/null
+++ b/AI/opea/llm-textgen/1.3/24.03-lts/Dockerfile
@@ -0,0 +1,34 @@
+ARG BASE=openeuler/python:3.11.13-oe2403lts
+ARG VERSION=v1.3
+
+FROM $BASE
+
+RUN yum update -y && \
+    yum install -y \
+    mesa-libGL \
+    jemalloc-devel \
+    shadow \
+    git && \
+    yum clean all
+
+RUN useradd -m -s /bin/bash user && \
+    mkdir -p /home/user && \
+    chown -R user /home/user/
+
+ARG VERSION
+ARG GENAICOMPS_REPO=https://github.com/opea-project/GenAIComps.git
+RUN git clone -b $VERSION $GENAICOMPS_REPO && \
+    cp -r GenAIComps/comps /home/user/comps && \
+    rm -rf GenAIComps
+
+
+RUN pip install --no-cache-dir --upgrade pip setuptools && \
+    pip install --no-cache-dir -r /home/user/comps/llms/src/text-generation/requirements.txt
+
+ENV PYTHONPATH=$PYTHONPATH:/home/user
+
+USER user
+
+WORKDIR /home/user/comps/llms/src/text-generation
+
+ENTRYPOINT ["bash", "entrypoint.sh"]
\ No newline at end of file
diff --git a/AI/opea/llm-textgen/meta.yml b/AI/opea/llm-textgen/meta.yml
index ee4b49e9afa63ff59b2371473ff499d093b30722..2ab4c3bec8ff02a3857cd9799cb1a5c043a62587 100644
--- a/AI/opea/llm-textgen/meta.yml
+++ b/AI/opea/llm-textgen/meta.yml
@@ -1,3 +1,7 @@
 1.2-oe2403lts:
   path: 1.2/24.03-lts/Dockerfile
+  arch: x86_64
+
+1.3-oe2403lts:
+  path: 1.3/24.03-lts/Dockerfile
   arch: x86_64
\ No newline at end of file
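
For context, a minimal local build sketch for the three 1.3 Dockerfiles added above. The `openeuler/<name>:1.3-oe2403lts` tags are assumptions that mirror the `<version>-oe2403lts` keys declared in the meta.yml files; the registry and image names actually published are not defined in this diff. None of these Dockerfiles copy files from the build context (they clone GenAIExamples/GenAIComps during the build), so any context directory works.

```bash
# Build sketch under stated assumptions: tag names follow the repo's existing
# <app>:<version>-oe2403lts convention and are not taken from this diff.
cd AI/opea

# CodeGen megaservice, layered on openeuler/comps-base:1.3-oe2403lts
docker build -f codegen/1.3/24.03-lts/Dockerfile \
  -t openeuler/codegen:1.3-oe2403lts codegen/1.3/24.03-lts

# text-generation microservice
docker build -f llm-textgen/1.3/24.03-lts/Dockerfile \
  -t openeuler/llm-textgen:1.3-oe2403lts llm-textgen/1.3/24.03-lts

# Gradio UI
docker build -f codegen-gradio-ui/1.3/24.03-lts/Dockerfile \
  -t openeuler/codegen-gradio-ui:1.3-oe2403lts codegen-gradio-ui/1.3/24.03-lts
```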