From 54dfaa5dd6bc2720cacc511e7d13c2ac8f39fe64 Mon Sep 17 00:00:00 2001 From: liu lili Date: Tue, 2 Sep 2025 15:15:36 +0800 Subject: [PATCH 1/3] lll: solve model infer docs --- .../model_infer/ms_infer/ms_infer_model_serving_infer.md | 8 ++++---- .../model_infer/ms_infer/ms_infer_model_serving_infer.md | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/tutorials/source_en/model_infer/ms_infer/ms_infer_model_serving_infer.md b/tutorials/source_en/model_infer/ms_infer/ms_infer_model_serving_infer.md index 486eb3f504..5f56a9a52c 100644 --- a/tutorials/source_en/model_infer/ms_infer/ms_infer_model_serving_infer.md +++ b/tutorials/source_en/model_infer/ms_infer/ms_infer_model_serving_infer.md @@ -69,7 +69,7 @@ bash install_depend_pkgs.sh python setup.py install ``` -After the vLLM-MindSpore Plugin operating environment is created, you need to install the following dependency packages: +The main dependences packages of vLLM-MindSpore Plugin are listed in follow: - **mindspore**: MindSpore development framework, which is the basis for model running. @@ -89,7 +89,7 @@ The service-oriented vLLM-MindSpore Plugin supports the direct running of the na ```shell git lfs install -git clone https://huggingface.co/Qwen/Qwen2-7B-Instruct +git clone https://huggingface.co/Qwen/Qwen2-7B ``` If `git lfs install` fails during the pull process, refer to the vLLM-MindSpore Plugin [FAQ](https://www.mindspore.cn/vllm_mindspore/docs/en/master/faqs/faqs.html) for a solution. 
@@ -119,7 +119,7 @@ export VLLM_HTTP_PORT=8080 unset vLLM_MODEL_BACKEND # model envs -export MODEL_ID="/path/to/model/Qwen2-7B-Instruct" +export MODEL_ID="/path/to/model/Qwen2-7B" ``` Run the following command to start the vLLM-MindSpore Plugin service backend: @@ -145,7 +145,7 @@ After receiving the inference request, the service backend calculates and return "id":"cmpl-1c30caf453154b5ab4a579b7b06cea19", "object":"text_completion", "created":1754103773, - "model":"/path/to/model/Qwen2-7B-Instruct", + "model":"/path/to/model/Qwen2-7B", "choices":[ { "index":0, diff --git a/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_serving_infer.md b/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_serving_infer.md index 81c135b13e..92de7e253d 100644 --- a/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_serving_infer.md +++ b/tutorials/source_zh_cn/model_infer/ms_infer/ms_infer_model_serving_infer.md @@ -69,7 +69,7 @@ bash install_depend_pkgs.sh python setup.py install ``` -vLLM-MindSpore插件的运行环境创建后,还需要安装以下依赖包: +vLLM-MindSpore插件主要依赖的组件包含如下: - **mindspore**:MindSpore开发框架,模型运行基础。 @@ -89,7 +89,7 @@ vLLM-MindSpore插件服务化支持原生Hugging Face的模型直接运行,因 ```shell git lfs install -git clone https://huggingface.co/Qwen/Qwen2-7B-Instruct +git clone https://huggingface.co/Qwen/Qwen2-7B ``` 若在拉取过程中,执行`git lfs install失败`,可以参考vLLM-MindSpore插件 [FAQ](https://www.mindspore.cn/vllm_mindspore/docs/zh-CN/master/faqs/faqs.html) 进行解决。 @@ -119,7 +119,7 @@ export VLLM_HTTP_PORT=8080 unset vLLM_MODEL_BACKEND # model envs -export MODEL_ID="/path/to/model/Qwen2-7B-Instruct" +export MODEL_ID="/path/to/model/Qwen2-7B" ``` 执行如下命令可以启动vLLM-MindSpore插件的服务后端。 @@ -145,7 +145,7 @@ curl http://${VLLM_MASTER_IP}:${VLLM_HTTP_PORT}/v1/completions -H "Content-Type: "id":"cmpl-1c30caf453154b5ab4a579b7b06cea19", "object":"text_completion", "created":1754103773, - "model":"/path/to/model/Qwen2-7B-Instruct", + "model":"/path/to/model/Qwen2-7B", "choices":[ { "index":0, -- Gitee From 
0fba6f93f80757ad69c5507ce1806a5069426bef Mon Sep 17 00:00:00 2001 From: liu lili Date: Tue, 2 Sep 2025 16:41:47 +0800 Subject: [PATCH 2/3] lll: solve model infer docs --- .../model_infer/ms_infer/ms_infer_model_serving_infer.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tutorials/source_en/model_infer/ms_infer/ms_infer_model_serving_infer.md b/tutorials/source_en/model_infer/ms_infer/ms_infer_model_serving_infer.md index 5f56a9a52c..0f1cce8178 100644 --- a/tutorials/source_en/model_infer/ms_infer/ms_infer_model_serving_infer.md +++ b/tutorials/source_en/model_infer/ms_infer/ms_infer_model_serving_infer.md @@ -69,7 +69,7 @@ bash install_depend_pkgs.sh python setup.py install ``` -The main dependences packages of vLLM-MindSpore Plugin are listed in follow: +The main dependency packages of vLLM-MindSpore Plugin are listed in follow: - **mindspore**: MindSpore development framework, which is the basis for model running. -- Gitee From b163c5d1a7c81baaaaa63798b58616ea7ecd4184 Mon Sep 17 00:00:00 2001 From: liu lili Date: Tue, 2 Sep 2025 16:44:06 +0800 Subject: [PATCH 3/3] lll: solve model infer docs --- .../model_infer/ms_infer/ms_infer_model_serving_infer.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tutorials/source_en/model_infer/ms_infer/ms_infer_model_serving_infer.md b/tutorials/source_en/model_infer/ms_infer/ms_infer_model_serving_infer.md index 0f1cce8178..0a5619cb8e 100644 --- a/tutorials/source_en/model_infer/ms_infer/ms_infer_model_serving_infer.md +++ b/tutorials/source_en/model_infer/ms_infer/ms_infer_model_serving_infer.md @@ -69,7 +69,7 @@ bash install_depend_pkgs.sh python setup.py install ``` -The main dependency packages of vLLM-MindSpore Plugin are listed in follow: +The main package dependencies of vLLM-MindSpore Plugin are listed as follows: - **mindspore**: MindSpore development framework, which is the basis for model running. -- Gitee