From 7aa46b581d5686ffbd5371205859368aaf6aab5c Mon Sep 17 00:00:00 2001 From: "mingjiang.li" Date: Fri, 25 Jul 2025 15:45:01 +0800 Subject: [PATCH 1/3] ignore .vscode dir Signed-off-by: mingjiang.li --- .gitignore | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 464531f4..e21cd798 100644 --- a/.gitignore +++ b/.gitignore @@ -62,4 +62,7 @@ imagenet_val/ *.onnx *.pth *.engine -data/ \ No newline at end of file +data/ + +# IDE +.vscode/ -- Gitee From 3205ac9184cbd5585a55070ece70211b345e6b89 Mon Sep 17 00:00:00 2001 From: "mingjiang.li" Date: Fri, 25 Jul 2025 15:54:48 +0800 Subject: [PATCH 2/3] rename IxRT to be ixRT (official name) --- README.md | 30 +++++++++---------- README_en.md | 26 ++++++++-------- .../conformer/ixrt/README.md | 2 +- .../transformer_asr/ixrt/README.md | 2 +- .../cv/classification/alexnet/ixrt/README.md | 2 +- models/cv/classification/clip/ixrt/README.md | 2 +- .../convnext_base/ixrt/README.md | 2 +- .../convnext_small/ixrt/README.md | 2 +- .../cspdarknet53/ixrt/README.md | 2 +- .../classification/cspresnet50/ixrt/README.md | 2 +- .../cspresnext50/ixrt/README.md | 2 +- .../classification/deit_tiny/ixrt/README.md | 2 +- .../classification/densenet121/ixrt/README.md | 2 +- .../classification/densenet161/ixrt/README.md | 2 +- .../classification/densenet169/ixrt/README.md | 2 +- .../classification/densenet201/ixrt/README.md | 2 +- .../efficientnet_b0/ixrt/README.md | 2 +- .../efficientnet_b1/ixrt/README.md | 2 +- .../efficientnet_b2/ixrt/README.md | 2 +- .../efficientnet_b3/ixrt/README.md | 2 +- .../efficientnet_b4/ixrt/README.md | 2 +- .../efficientnet_b5/ixrt/README.md | 2 +- .../efficientnet_v2/ixrt/README.md | 2 +- .../efficientnet_v2_s/ixrt/README.md | 2 +- .../classification/googlenet/ixrt/README.md | 2 +- .../classification/hrnet_w18/ixrt/README.md | 2 +- .../inception_resnet_v2/ixrt/README.md | 2 +- .../inception_v3/ixrt/README.md | 2 +- .../mobilenet_v2/ixrt/README.md | 2 +- .../mobilenet_v3/ixrt/README.md | 2 +- .../cv/classification/repvgg/ixrt/README.md | 2 +- .../classification/res2net50/ixrt/README.md | 2 +- .../classification/resnet101/ixrt/README.md | 2 +- .../cv/classification/resnet18/ixrt/README.md | 2 +- .../cv/classification/resnet34/ixrt/README.md | 2 +- .../cv/classification/resnet50/ixrt/README.md | 2 +- .../classification/resnetv1d50/ixrt/README.md | 2 +- .../resnext101_32x8d/ixrt/README.md | 2 +- .../resnext101_64x4d/ixrt/README.md | 2 +- .../resnext50_32x4d/ixrt/README.md | 2 +- .../shufflenet_v1/ixrt/README.md | 2 +- .../shufflenetv2_x0_5/ixrt/README.md | 2 +- .../shufflenetv2_x1_0/ixrt/README.md | 2 +- .../shufflenetv2_x1_5/ixrt/README.md | 2 +- .../shufflenetv2_x2_0/ixrt/README.md | 2 +- .../squeezenet_v1_0/ixrt/README.md | 2 +- .../squeezenet_v1_1/ixrt/README.md | 2 +- .../swin_transformer_large/ixrt/README.md | 2 +- models/cv/classification/vgg16/ixrt/README.md | 2 +- .../wide_resnet50/ixrt/README.md | 2 +- .../face_recognition/facenet/ixrt/README.md | 2 +- .../mask_rcnn/ixrt/README.md | 2 +- .../solov1/ixrt/README.md | 2 +- .../cv/object_detection/atss/ixrt/README.md | 2 +- .../object_detection/centernet/ixrt/README.md | 2 +- .../cv/object_detection/detr/ixrt/README.md | 2 +- .../cv/object_detection/fcos/ixrt/README.md | 2 +- .../object_detection/foveabox/ixrt/README.md | 2 +- .../cv/object_detection/fsaf/ixrt/README.md | 2 +- .../cv/object_detection/hrnet/ixrt/README.md | 2 +- models/cv/object_detection/paa/ixrt/README.md | 2 +- .../retinaface/ixrt/README.md | 2 +- 
.../object_detection/retinanet/ixrt/README.md | 2 +- .../object_detection/yolov10/ixrt/README.md | 2 +- .../object_detection/yolov11/ixrt/README.md | 2 +- .../cv/object_detection/yolov3/ixrt/README.md | 2 +- .../cv/object_detection/yolov4/ixrt/README.md | 2 +- .../cv/object_detection/yolov5/ixrt/README.md | 2 +- .../object_detection/yolov5s/ixrt/README.md | 2 +- .../cv/object_detection/yolov6/ixrt/README.md | 2 +- .../cv/object_detection/yolov7/ixrt/README.md | 2 +- .../cv/object_detection/yolov8/ixrt/README.md | 2 +- .../cv/object_detection/yolov9/ixrt/README.md | 2 +- .../cv/object_detection/yolox/ixrt/README.md | 2 +- .../lightweight_openpose/ixrt/README.md | 2 +- .../cv/pose_estimation/rtmpose/ixrt/README.md | 2 +- models/nlp/plm/albert/ixrt/README.md | 2 +- models/nlp/plm/bert_base_squad/ixrt/README.md | 2 +- .../nlp/plm/bert_large_squad/ixrt/README.md | 2 +- models/nlp/plm/deberta/ixrt/README.md | 2 +- models/nlp/plm/roberta/ixrt/README.md | 2 +- models/nlp/plm/roformer/ixrt/README.md | 2 +- models/nlp/plm/videobert/ixrt/README.md | 2 +- .../wide_and_deep/ixrt/README.md | 2 +- 84 files changed, 110 insertions(+), 110 deletions(-) diff --git a/README.md b/README.md index fb347318..8e13d5c5 100644 --- a/README.md +++ b/README.md @@ -10,15 +10,15 @@
-DeepSparkInference推理模型库作为DeepSpark开源社区的核心项目,于2024年3月正式开源,一期甄选了48个推理模型示例,涵盖计算机视觉,自然语言处理,语音识别等领域,后续将逐步拓展更多AI领域。 +`DeepSparkInference`推理模型库作为`DeepSpark`开源社区的核心项目,于2024年3月正式开源,一期甄选了48个推理模型示例,涵盖计算机视觉,自然语言处理,语音识别等领域,后续将逐步拓展更多AI领域。 -DeepSparkInference中的模型提供了在国产推理引擎IGIE或IxRT下运行的推理示例和指导文档,部分模型提供了基于国产通用GPU[智铠100](https://www.iluvatar.com/productDetails?fullCode=cpjs-yj-tlxltt-zk100)的评测结果。 +`DeepSparkInference`中的模型提供了在国产推理引擎`IGIE`或`ixRT`下运行的推理示例和指导文档,部分模型提供了基于国产通用GPU[智铠100](https://www.iluvatar.com/productDetails?fullCode=cpjs-yj-tlxltt-zk100)的评测结果。 -IGIE(Iluvatar GPU Inference Engine)是基于TVM框架研发的高性能、高通用、全流程的AI推理引擎。支持多框架模型导入、量化、图优化、多算子库支持、多后端支持、算子自动调优等特性,为推理场景提供易部署、高吞吐量、低延迟的完整方案。 +`IGIE`(Iluvatar GPU Inference Engine)是基于TVM框架研发的高性能、高通用、全流程的AI推理引擎。支持多框架模型导入、量化、图优化、多算子库支持、多后端支持、算子自动调优等特性,为推理场景提供易部署、高吞吐量、低延迟的完整方案。 -IxRT(Iluvatar CoreX RunTime)是天数智芯自研的高性能推理引擎,专注于最大限度发挥天数智芯通用GPU 的性能,实现各领域模型的高性能推理。IxRT支持动态形状推理、插件和INT8/FP16推理等特性。 +`ixRT`(Iluvatar CoreX RunTime)是天数智芯自研的高性能推理引擎,专注于最大限度发挥天数智芯通用GPU 的性能,实现各领域模型的高性能推理。`ixRT`支持动态形状推理、插件和INT8/FP16推理等特性。 -DeepSparkInference将按季度进行版本更新,后续会逐步丰富模型类别并拓展大模型推理。 +`DeepSparkInference`将按季度进行版本更新,后续会逐步丰富模型类别并拓展大模型推理。 ## 模型库 @@ -52,7 +52,7 @@ DeepSparkInference将按季度进行版本更新,后续会逐步丰富模型 #### 视觉分类 -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |------------------------|-------|--------------------------------------------------------|-----------------------------------------------------------|-----------| | AlexNet | FP16 | [✅](models/cv/classification/alexnet/igie) | [✅](models/cv/classification/alexnet/ixrt) | 4.3.0 | | | INT8 | [✅](models/cv/classification/alexnet/igie) | [✅](models/cv/classification/alexnet/ixrt) | 4.3.0 | @@ -152,7 +152,7 @@ DeepSparkInference将按季度进行版本更新,后续会逐步丰富模型 #### 目标检测 -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |------------|-------|-------------------------------------------------|-------------------------------------------------|-----------| | ATSS | FP16 | [✅](models/cv/object_detection/atss/igie) | | 4.3.0 | | CenterNet | FP16 | [✅](models/cv/object_detection/centernet/igie) | [✅](models/cv/object_detection/centernet/ixrt) | 4.3.0 | @@ -190,7 +190,7 @@ DeepSparkInference将按季度进行版本更新,后续会逐步丰富模型 #### 人脸识别 -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |---------|-------|------|----------------------------------------------|-----------| | FaceNet | FP16 | | [✅](models/cv/face_recognition/facenet/ixrt) | 4.3.0 | | | INT8 | | [✅](models/cv/face_recognition/facenet/ixrt) | 4.3.0 | @@ -204,7 +204,7 @@ DeepSparkInference将按季度进行版本更新,后续会逐步丰富模型 #### 姿态估计 -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |----------------------|-------|-----------------------------------------------|----------------------------------------------------------|-----------| | HRNetPose | FP16 | [✅](models/cv/pose_estimation/hrnetpose/igie) | | 4.3.0 | | Lightweight OpenPose | FP16 | | [✅](models/cv/pose_estimation/lightweight_openpose/ixrt) | 4.3.0 | @@ -212,20 +212,20 @@ DeepSparkInference将按季度进行版本更新,后续会逐步丰富模型 #### 实例分割 -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |------------|-------|------|-----------------------------------------------------|-----------| | Mask R-CNN | FP16 | | [✅](models/cv/instance_segmentation/mask_rcnn/ixrt) | 4.2.0 | | SOLOv1 | FP16 | | [✅](models/cv/instance_segmentation/solov1/ixrt) | 4.3.0 | #### 语义分割 -| Model | Prec. 
| IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |-------|-------|------------------------------------------------|------|-----------| | UNet | FP16 | [✅](models/cv/semantic_segmentation/unet/igie) | | 4.3.0 | #### 多目标跟踪 -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |---------------------|-------|----------------------------------------------------|------|-----------| | FastReID | FP16 | [✅](models/cv/multi_object_tracking/fastreid/igie) | | 4.3.0 | | DeepSort | FP16 | [✅](models/cv/multi_object_tracking/deepsort/igie) | | 4.3.0 | @@ -253,7 +253,7 @@ DeepSparkInference将按季度进行版本更新,后续会逐步丰富模型 #### 预训练语言模型(PLM) -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |------------------|-------|-------------------------------------------|-------------------------------------------|-----------| | ALBERT | FP16 | | [✅](models/nlp/plm/albert/ixrt) | 4.3.0 | | BERT Base NER | INT8 | [✅](models/nlp/plm/bert_base_ner/igie) | | 4.3.0 | @@ -270,7 +270,7 @@ DeepSparkInference将按季度进行版本更新,后续会逐步丰富模型 #### 语音识别 -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |-----------------|-------|-----------------------------------------------------|-----------------------------------------------------------|-----------| | Conformer | FP16 | [✅](models/audio/speech_recognition/conformer/igie) | [✅](models/audio/speech_recognition/conformer/ixrt) | 4.3.0 | | Transformer ASR | FP16 | | [✅](models/audio/speech_recognition/transformer_asr/ixrt) | 4.2.0 | @@ -279,7 +279,7 @@ DeepSparkInference将按季度进行版本更新,后续会逐步丰富模型 #### 推荐系统 -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |-------------|-------|------|------------------------------------------------------|-----------| | Wide & Deep | FP16 | | [✅](models/others/recommendation/wide_and_deep/ixrt) | 4.3.0 | diff --git a/README_en.md b/README_en.md index 41fbda18..df00bc69 100644 --- a/README_en.md +++ b/README_en.md @@ -15,7 +15,7 @@ March 2024. The first release selected 48 inference model examples, covering fie language processing, and speech recognition. More AI domains will be gradually expanded in the future. The models in DeepSparkInference provide inference examples and guidance documents for running on inference engines IGIE -or IxRT self-developed by Iluvatar CoreX. Some models provide evaluation results based on the self-developed GPGPU +or ixRT self-developed by Iluvatar CoreX. Some models provide evaluation results based on the self-developed GPGPU Zhikai 100. IGIE (Iluvatar GPU Inference Engine) is a high-performance, highly gene, and end-to-end AI inference engine developed @@ -23,9 +23,9 @@ based on the TVM framework. It supports multi-framework model, quantization, gra support, multi-backend support, and automatic operator tuning, providing an easy-to-deploy, high-throughput, and low-latency complete solution for inference scenarios. -IxRT (Iluvatar CoreX RunTime) is a high-performance inference engine independently developed by Iluvatar CoreX, focusing +ixRT (Iluvatar CoreX RunTime) is a high-performance inference engine independently developed by Iluvatar CoreX, focusing on maximizing the performance of Iluvatar CoreX's GPGPU and achieving high-performance inference for models in various -fields. IxRT supports features such as dynamic shape inference, plugins, and INT8/FP16 inference. +fields. ixRT supports features such as dynamic shape inference, plugins, and INT8/FP16 inference. 
DeepSparkInference will be updated quarterly, and model categories will be gradually enriched, with large model inference to be expanded in the future. @@ -62,7 +62,7 @@ inference to be expanded in the future. #### Classification -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |------------------------|-------|--------------------------------------------------------|-----------------------------------------------------------|-----------| | AlexNet | FP16 | [✅](models/cv/classification/alexnet/igie) | [✅](models/cv/classification/alexnet/ixrt) | 4.2.0 | | | INT8 | [✅](models/cv/classification/alexnet/igie) | [✅](models/cv/classification/alexnet/ixrt) | 4.2.0 | @@ -162,7 +162,7 @@ inference to be expanded in the future. #### Object Detection -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |------------|-------|-------------------------------------------------|-------------------------------------------------|-----------| | ATSS | FP16 | [✅](models/cv/object_detection/atss/igie) | | 4.2.0 | | CenterNet | FP16 | [✅](models/cv/object_detection/centernet/igie) | [✅](models/cv/object_detection/centernet/ixrt) | 4.2.0 | @@ -200,7 +200,7 @@ inference to be expanded in the future. #### Face Recognition -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |---------|-------|------|----------------------------------------------|-----------| | FaceNet | FP16 | | [✅](models/cv/face_recognition/facenet/ixrt) | 4.2.0 | | | INT8 | | [✅](models/cv/face_recognition/facenet/ixrt) | 4.2.0 | @@ -214,7 +214,7 @@ inference to be expanded in the future. #### Pose Estimation -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |----------------------|-------|-----------------------------------------------|----------------------------------------------------------|-----------| | HRNetPose | FP16 | [✅](models/cv/pose_estimation/hrnetpose/igie) | | 4.2.0 | | Lightweight OpenPose | FP16 | | [✅](models/cv/pose_estimation/lightweight_openpose/ixrt) | 4.2.0 | @@ -222,20 +222,20 @@ inference to be expanded in the future. #### Instance Segmentation -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |------------|-------|------|-----------------------------------------------------|-----------| | Mask R-CNN | FP16 | | [✅](models/cv/instance_segmentation/mask_rcnn/ixrt) | 4.2.0 | | SOLOv1 | FP16 | | [✅](models/cv/instance_segmentation/solov1/ixrt) | 4.2.0 | #### Semantic Segmentation -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |-------|-------|------------------------------------------------|------|-----------| | UNet | FP16 | [✅](models/cv/semantic_segmentation/unet/igie) | | 4.2.0 | #### Multi-Object Tracking -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |---------------------|-------|----------------------------------------------------|------|-----------| | FastReID | FP16 | [✅](models/cv/multi_object_tracking/fastreid/igie) | | 4.2.0 | | DeepSort | FP16 | [✅](models/cv/multi_object_tracking/deepsort/igie) | | 4.2.0 | @@ -263,7 +263,7 @@ inference to be expanded in the future. #### PLM (Pre-trained Language Model) -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. 
| IGIE | ixRT | IXUCA SDK | |------------------|-------|-------------------------------------------|-------------------------------------------|-----------| | ALBERT | FP16 | | [✅](models/nlp/plm/albert/ixrt) | 4.2.0 | | BERT Base NER | INT8 | [✅](models/nlp/plm/bert_base_ner/igie) | | 4.2.0 | @@ -280,7 +280,7 @@ inference to be expanded in the future. #### Speech Recognition -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |-----------------|-------|-----------------------------------------------------|-----------------------------------------------------------|-----------| | Conformer | FP16 | [✅](models/audio/speech_recognition/conformer/igie) | [✅](models/audio/speech_recognition/conformer/ixrt) | 4.2.0 | | Transformer ASR | FP16 | | [✅](models/audio/speech_recognition/transformer_asr/ixrt) | 4.2.0 | @@ -289,7 +289,7 @@ inference to be expanded in the future. #### Recommendation Systems -| Model | Prec. | IGIE | IxRT | IXUCA SDK | +| Model | Prec. | IGIE | ixRT | IXUCA SDK | |-------------|-------|------|------------------------------------------------------|-----------| | Wide & Deep | FP16 | | [✅](models/others/recommendation/wide_and_deep/ixrt) | 4.2.0 | diff --git a/models/audio/speech_recognition/conformer/ixrt/README.md b/models/audio/speech_recognition/conformer/ixrt/README.md index 56ea26cc..fd7a35de 100644 --- a/models/audio/speech_recognition/conformer/ixrt/README.md +++ b/models/audio/speech_recognition/conformer/ixrt/README.md @@ -1,4 +1,4 @@ -# Conformer (IxRT) +# Conformer (ixRT) ## Model Description diff --git a/models/audio/speech_recognition/transformer_asr/ixrt/README.md b/models/audio/speech_recognition/transformer_asr/ixrt/README.md index 5792574e..a6b069c4 100644 --- a/models/audio/speech_recognition/transformer_asr/ixrt/README.md +++ b/models/audio/speech_recognition/transformer_asr/ixrt/README.md @@ -1,4 +1,4 @@ -# Transformer ASR (IxRT) +# Transformer ASR (ixRT) ## Model Description diff --git a/models/cv/classification/alexnet/ixrt/README.md b/models/cv/classification/alexnet/ixrt/README.md index 34b11957..221eb875 100644 --- a/models/cv/classification/alexnet/ixrt/README.md +++ b/models/cv/classification/alexnet/ixrt/README.md @@ -1,4 +1,4 @@ -# AlexNet (IxRT) +# AlexNet (ixRT) ## Model Description diff --git a/models/cv/classification/clip/ixrt/README.md b/models/cv/classification/clip/ixrt/README.md index a399d9d5..adf7d9f4 100644 --- a/models/cv/classification/clip/ixrt/README.md +++ b/models/cv/classification/clip/ixrt/README.md @@ -1,4 +1,4 @@ -# CLIP (IxRT) +# CLIP (ixRT) ## Model Description diff --git a/models/cv/classification/convnext_base/ixrt/README.md b/models/cv/classification/convnext_base/ixrt/README.md index 9dfc874f..ae393cb7 100644 --- a/models/cv/classification/convnext_base/ixrt/README.md +++ b/models/cv/classification/convnext_base/ixrt/README.md @@ -1,4 +1,4 @@ -# ConvNeXt Base (IxRT) +# ConvNeXt Base (ixRT) ## Model Description diff --git a/models/cv/classification/convnext_small/ixrt/README.md b/models/cv/classification/convnext_small/ixrt/README.md index 8f216b60..368c243f 100644 --- a/models/cv/classification/convnext_small/ixrt/README.md +++ b/models/cv/classification/convnext_small/ixrt/README.md @@ -1,4 +1,4 @@ -# ConvNeXt Small (IxRT) +# ConvNeXt Small (ixRT) ## Model Description diff --git a/models/cv/classification/cspdarknet53/ixrt/README.md b/models/cv/classification/cspdarknet53/ixrt/README.md index 861860d8..ecb8aefb 100644 --- a/models/cv/classification/cspdarknet53/ixrt/README.md 
+++ b/models/cv/classification/cspdarknet53/ixrt/README.md @@ -1,4 +1,4 @@ -# CSPDarkNet53 (IxRT) +# CSPDarkNet53 (ixRT) ## Model Description diff --git a/models/cv/classification/cspresnet50/ixrt/README.md b/models/cv/classification/cspresnet50/ixrt/README.md index 01bed75f..8b8e0d26 100644 --- a/models/cv/classification/cspresnet50/ixrt/README.md +++ b/models/cv/classification/cspresnet50/ixrt/README.md @@ -1,4 +1,4 @@ -# CSPResNet50 (IxRT) +# CSPResNet50 (ixRT) ## Model Description diff --git a/models/cv/classification/cspresnext50/ixrt/README.md b/models/cv/classification/cspresnext50/ixrt/README.md index a0a83eed..52151b73 100644 --- a/models/cv/classification/cspresnext50/ixrt/README.md +++ b/models/cv/classification/cspresnext50/ixrt/README.md @@ -1,4 +1,4 @@ -# CSPResNeXt50 (IxRT) +# CSPResNeXt50 (ixRT) ## Model Description diff --git a/models/cv/classification/deit_tiny/ixrt/README.md b/models/cv/classification/deit_tiny/ixrt/README.md index 5f5a92e9..d463290e 100644 --- a/models/cv/classification/deit_tiny/ixrt/README.md +++ b/models/cv/classification/deit_tiny/ixrt/README.md @@ -1,4 +1,4 @@ -# DeiT-tiny (IxRT) +# DeiT-tiny (ixRT) ## Model Description diff --git a/models/cv/classification/densenet121/ixrt/README.md b/models/cv/classification/densenet121/ixrt/README.md index cf204af8..f7e92930 100644 --- a/models/cv/classification/densenet121/ixrt/README.md +++ b/models/cv/classification/densenet121/ixrt/README.md @@ -1,4 +1,4 @@ -# DenseNet (IxRT) +# DenseNet (ixRT) ## Model Description diff --git a/models/cv/classification/densenet161/ixrt/README.md b/models/cv/classification/densenet161/ixrt/README.md index 294d2d3c..e6cd9ebd 100644 --- a/models/cv/classification/densenet161/ixrt/README.md +++ b/models/cv/classification/densenet161/ixrt/README.md @@ -1,4 +1,4 @@ -# DenseNet161 (IxRT) +# DenseNet161 (ixRT) ## Model Description diff --git a/models/cv/classification/densenet169/ixrt/README.md b/models/cv/classification/densenet169/ixrt/README.md index c105e417..44184471 100644 --- a/models/cv/classification/densenet169/ixrt/README.md +++ b/models/cv/classification/densenet169/ixrt/README.md @@ -1,4 +1,4 @@ -# DenseNet169 (IxRT) +# DenseNet169 (ixRT) ## Model Description diff --git a/models/cv/classification/densenet201/ixrt/README.md b/models/cv/classification/densenet201/ixrt/README.md index 7b9810b2..3f9e206d 100644 --- a/models/cv/classification/densenet201/ixrt/README.md +++ b/models/cv/classification/densenet201/ixrt/README.md @@ -1,4 +1,4 @@ -# DenseNet201 (IxRT) +# DenseNet201 (ixRT) ## Model Description diff --git a/models/cv/classification/efficientnet_b0/ixrt/README.md b/models/cv/classification/efficientnet_b0/ixrt/README.md index 7606d841..d4cfa351 100644 --- a/models/cv/classification/efficientnet_b0/ixrt/README.md +++ b/models/cv/classification/efficientnet_b0/ixrt/README.md @@ -1,4 +1,4 @@ -# EfficientNet B0 (IxRT) +# EfficientNet B0 (ixRT) ## Model Description diff --git a/models/cv/classification/efficientnet_b1/ixrt/README.md b/models/cv/classification/efficientnet_b1/ixrt/README.md index dbad178f..9d6cbe93 100644 --- a/models/cv/classification/efficientnet_b1/ixrt/README.md +++ b/models/cv/classification/efficientnet_b1/ixrt/README.md @@ -1,4 +1,4 @@ -# EfficientNet B1 (IxRT) +# EfficientNet B1 (ixRT) ## Model Description diff --git a/models/cv/classification/efficientnet_b2/ixrt/README.md b/models/cv/classification/efficientnet_b2/ixrt/README.md index 80d95edf..860b1083 100644 --- a/models/cv/classification/efficientnet_b2/ixrt/README.md +++ 
b/models/cv/classification/efficientnet_b2/ixrt/README.md @@ -1,4 +1,4 @@ -# EfficientNet B2 (IxRT) +# EfficientNet B2 (ixRT) ## Model Description diff --git a/models/cv/classification/efficientnet_b3/ixrt/README.md b/models/cv/classification/efficientnet_b3/ixrt/README.md index fd312942..749d61a7 100644 --- a/models/cv/classification/efficientnet_b3/ixrt/README.md +++ b/models/cv/classification/efficientnet_b3/ixrt/README.md @@ -1,4 +1,4 @@ -# EfficientNet B3 (IxRT) +# EfficientNet B3 (ixRT) ## Model Description diff --git a/models/cv/classification/efficientnet_b4/ixrt/README.md b/models/cv/classification/efficientnet_b4/ixrt/README.md index 001356bd..aa8710d3 100644 --- a/models/cv/classification/efficientnet_b4/ixrt/README.md +++ b/models/cv/classification/efficientnet_b4/ixrt/README.md @@ -1,4 +1,4 @@ -# EfficientNet B4 (IxRT) +# EfficientNet B4 (ixRT) ## Model Description diff --git a/models/cv/classification/efficientnet_b5/ixrt/README.md b/models/cv/classification/efficientnet_b5/ixrt/README.md index 7d661ee0..8489d395 100644 --- a/models/cv/classification/efficientnet_b5/ixrt/README.md +++ b/models/cv/classification/efficientnet_b5/ixrt/README.md @@ -1,4 +1,4 @@ -# EfficientNet B5 (IxRT) +# EfficientNet B5 (ixRT) ## Model Description diff --git a/models/cv/classification/efficientnet_v2/ixrt/README.md b/models/cv/classification/efficientnet_v2/ixrt/README.md index cdcf5de8..57c974ae 100755 --- a/models/cv/classification/efficientnet_v2/ixrt/README.md +++ b/models/cv/classification/efficientnet_v2/ixrt/README.md @@ -1,4 +1,4 @@ -# EfficientNetV2 (IxRT) +# EfficientNetV2 (ixRT) ## Model Description diff --git a/models/cv/classification/efficientnet_v2_s/ixrt/README.md b/models/cv/classification/efficientnet_v2_s/ixrt/README.md index bf9a90ee..28ebce7d 100644 --- a/models/cv/classification/efficientnet_v2_s/ixrt/README.md +++ b/models/cv/classification/efficientnet_v2_s/ixrt/README.md @@ -1,4 +1,4 @@ -# EfficientNet_v2_s (IxRT) +# EfficientNet_v2_s (ixRT) ## Model Description diff --git a/models/cv/classification/googlenet/ixrt/README.md b/models/cv/classification/googlenet/ixrt/README.md index 252bc958..962bd3ce 100644 --- a/models/cv/classification/googlenet/ixrt/README.md +++ b/models/cv/classification/googlenet/ixrt/README.md @@ -1,4 +1,4 @@ -# GoogLeNet (IxRT) +# GoogLeNet (ixRT) ## Model Description diff --git a/models/cv/classification/hrnet_w18/ixrt/README.md b/models/cv/classification/hrnet_w18/ixrt/README.md index 0d121c8b..ea539f45 100644 --- a/models/cv/classification/hrnet_w18/ixrt/README.md +++ b/models/cv/classification/hrnet_w18/ixrt/README.md @@ -1,4 +1,4 @@ -# HRNet-W18 (IxRT) +# HRNet-W18 (ixRT) ## Model Description diff --git a/models/cv/classification/inception_resnet_v2/ixrt/README.md b/models/cv/classification/inception_resnet_v2/ixrt/README.md index 60a8c5f5..15deee31 100755 --- a/models/cv/classification/inception_resnet_v2/ixrt/README.md +++ b/models/cv/classification/inception_resnet_v2/ixrt/README.md @@ -1,4 +1,4 @@ -# Inception-ResNet-V2 (IxRT) +# Inception-ResNet-V2 (ixRT) ## Model Description diff --git a/models/cv/classification/inception_v3/ixrt/README.md b/models/cv/classification/inception_v3/ixrt/README.md index 5fd218df..f2d35856 100755 --- a/models/cv/classification/inception_v3/ixrt/README.md +++ b/models/cv/classification/inception_v3/ixrt/README.md @@ -1,4 +1,4 @@ -# Inception V3 (IxRT) +# Inception V3 (ixRT) ## Model Description diff --git a/models/cv/classification/mobilenet_v2/ixrt/README.md 
b/models/cv/classification/mobilenet_v2/ixrt/README.md index e6c658cb..5a512ecb 100644 --- a/models/cv/classification/mobilenet_v2/ixrt/README.md +++ b/models/cv/classification/mobilenet_v2/ixrt/README.md @@ -1,4 +1,4 @@ -# MobileNetV2 (IxRT) +# MobileNetV2 (ixRT) ## Model Description diff --git a/models/cv/classification/mobilenet_v3/ixrt/README.md b/models/cv/classification/mobilenet_v3/ixrt/README.md index 149bed83..91f857e6 100644 --- a/models/cv/classification/mobilenet_v3/ixrt/README.md +++ b/models/cv/classification/mobilenet_v3/ixrt/README.md @@ -1,4 +1,4 @@ -# MobileNetV3 (IxRT) +# MobileNetV3 (ixRT) ## Model Description diff --git a/models/cv/classification/repvgg/ixrt/README.md b/models/cv/classification/repvgg/ixrt/README.md index 897b3375..fbc5915b 100644 --- a/models/cv/classification/repvgg/ixrt/README.md +++ b/models/cv/classification/repvgg/ixrt/README.md @@ -1,4 +1,4 @@ -# RepVGG (IxRT) +# RepVGG (ixRT) ## Model Description diff --git a/models/cv/classification/res2net50/ixrt/README.md b/models/cv/classification/res2net50/ixrt/README.md index b0ce62ca..0f86895f 100644 --- a/models/cv/classification/res2net50/ixrt/README.md +++ b/models/cv/classification/res2net50/ixrt/README.md @@ -1,4 +1,4 @@ -# Res2Net50 (IxRT) +# Res2Net50 (ixRT) ## Model Description diff --git a/models/cv/classification/resnet101/ixrt/README.md b/models/cv/classification/resnet101/ixrt/README.md index 8b91c5b5..4fe1caa1 100644 --- a/models/cv/classification/resnet101/ixrt/README.md +++ b/models/cv/classification/resnet101/ixrt/README.md @@ -1,4 +1,4 @@ -# Resnet101 (IxRT) +# Resnet101 (ixRT) ## Model Description diff --git a/models/cv/classification/resnet18/ixrt/README.md b/models/cv/classification/resnet18/ixrt/README.md index bfb0d4b3..455ff186 100644 --- a/models/cv/classification/resnet18/ixrt/README.md +++ b/models/cv/classification/resnet18/ixrt/README.md @@ -1,4 +1,4 @@ -# ResNet18 (IxRT) +# ResNet18 (ixRT) ## Model Description diff --git a/models/cv/classification/resnet34/ixrt/README.md b/models/cv/classification/resnet34/ixrt/README.md index fc63548e..9d33b97c 100644 --- a/models/cv/classification/resnet34/ixrt/README.md +++ b/models/cv/classification/resnet34/ixrt/README.md @@ -1,4 +1,4 @@ -# ResNet34 (IxRT) +# ResNet34 (ixRT) ## Model Description diff --git a/models/cv/classification/resnet50/ixrt/README.md b/models/cv/classification/resnet50/ixrt/README.md index c4edfe1d..4d5a870c 100644 --- a/models/cv/classification/resnet50/ixrt/README.md +++ b/models/cv/classification/resnet50/ixrt/README.md @@ -1,4 +1,4 @@ -# ResNet50 (IxRT) +# ResNet50 (ixRT) ## Model Description diff --git a/models/cv/classification/resnetv1d50/ixrt/README.md b/models/cv/classification/resnetv1d50/ixrt/README.md index 5f85b4e1..7d3be62c 100644 --- a/models/cv/classification/resnetv1d50/ixrt/README.md +++ b/models/cv/classification/resnetv1d50/ixrt/README.md @@ -1,4 +1,4 @@ -# ResNetV1D50 (IxRT) +# ResNetV1D50 (ixRT) ## Model Description diff --git a/models/cv/classification/resnext101_32x8d/ixrt/README.md b/models/cv/classification/resnext101_32x8d/ixrt/README.md index 84a63be8..6df37e99 100644 --- a/models/cv/classification/resnext101_32x8d/ixrt/README.md +++ b/models/cv/classification/resnext101_32x8d/ixrt/README.md @@ -1,4 +1,4 @@ -# ResNext101_32x8d (IxRT) +# ResNext101_32x8d (ixRT) ## Model Description diff --git a/models/cv/classification/resnext101_64x4d/ixrt/README.md b/models/cv/classification/resnext101_64x4d/ixrt/README.md index 503d2847..0f22a5a5 100644 --- 
a/models/cv/classification/resnext101_64x4d/ixrt/README.md +++ b/models/cv/classification/resnext101_64x4d/ixrt/README.md @@ -1,4 +1,4 @@ -# ResNext101_64x4d (IxRT) +# ResNext101_64x4d (ixRT) ## Model Description diff --git a/models/cv/classification/resnext50_32x4d/ixrt/README.md b/models/cv/classification/resnext50_32x4d/ixrt/README.md index da346292..aa0034dc 100644 --- a/models/cv/classification/resnext50_32x4d/ixrt/README.md +++ b/models/cv/classification/resnext50_32x4d/ixrt/README.md @@ -1,4 +1,4 @@ -# ResNext50_32x4d (IxRT) +# ResNext50_32x4d (ixRT) ## Model Description diff --git a/models/cv/classification/shufflenet_v1/ixrt/README.md b/models/cv/classification/shufflenet_v1/ixrt/README.md index 9417776f..11caf110 100644 --- a/models/cv/classification/shufflenet_v1/ixrt/README.md +++ b/models/cv/classification/shufflenet_v1/ixrt/README.md @@ -1,4 +1,4 @@ -# ShuffleNetV1 (IxRT) +# ShuffleNetV1 (ixRT) ## Model Description diff --git a/models/cv/classification/shufflenetv2_x0_5/ixrt/README.md b/models/cv/classification/shufflenetv2_x0_5/ixrt/README.md index dc1d4289..282ab1c5 100644 --- a/models/cv/classification/shufflenetv2_x0_5/ixrt/README.md +++ b/models/cv/classification/shufflenetv2_x0_5/ixrt/README.md @@ -1,4 +1,4 @@ -# ShuffleNetV2_x0_5 (IxRT) +# ShuffleNetV2_x0_5 (ixRT) ## Model Description diff --git a/models/cv/classification/shufflenetv2_x1_0/ixrt/README.md b/models/cv/classification/shufflenetv2_x1_0/ixrt/README.md index 5122bd33..58accdb7 100644 --- a/models/cv/classification/shufflenetv2_x1_0/ixrt/README.md +++ b/models/cv/classification/shufflenetv2_x1_0/ixrt/README.md @@ -1,4 +1,4 @@ -# ShuffleNetV2_x1_0 (IxRT) +# ShuffleNetV2_x1_0 (ixRT) ## Model Description diff --git a/models/cv/classification/shufflenetv2_x1_5/ixrt/README.md b/models/cv/classification/shufflenetv2_x1_5/ixrt/README.md index c10d0a2c..d10e09fc 100644 --- a/models/cv/classification/shufflenetv2_x1_5/ixrt/README.md +++ b/models/cv/classification/shufflenetv2_x1_5/ixrt/README.md @@ -1,4 +1,4 @@ -# ShuffleNetV2_x1_5 (IxRT) +# ShuffleNetV2_x1_5 (ixRT) ## Model Description diff --git a/models/cv/classification/shufflenetv2_x2_0/ixrt/README.md b/models/cv/classification/shufflenetv2_x2_0/ixrt/README.md index 529e5f86..8f5963c1 100644 --- a/models/cv/classification/shufflenetv2_x2_0/ixrt/README.md +++ b/models/cv/classification/shufflenetv2_x2_0/ixrt/README.md @@ -1,4 +1,4 @@ -# ShuffleNetV2_x2_0 (IxRT) +# ShuffleNetV2_x2_0 (ixRT) ## Model Description diff --git a/models/cv/classification/squeezenet_v1_0/ixrt/README.md b/models/cv/classification/squeezenet_v1_0/ixrt/README.md index 69d79fed..e5256e6f 100644 --- a/models/cv/classification/squeezenet_v1_0/ixrt/README.md +++ b/models/cv/classification/squeezenet_v1_0/ixrt/README.md @@ -1,4 +1,4 @@ -# SqueezeNet 1.0 (IxRT) +# SqueezeNet 1.0 (ixRT) ## Model Description diff --git a/models/cv/classification/squeezenet_v1_1/ixrt/README.md b/models/cv/classification/squeezenet_v1_1/ixrt/README.md index 39811e76..15699bb8 100644 --- a/models/cv/classification/squeezenet_v1_1/ixrt/README.md +++ b/models/cv/classification/squeezenet_v1_1/ixrt/README.md @@ -1,4 +1,4 @@ -# SqueezeNet 1.1 (IxRT) +# SqueezeNet 1.1 (ixRT) ## Model Description diff --git a/models/cv/classification/swin_transformer_large/ixrt/README.md b/models/cv/classification/swin_transformer_large/ixrt/README.md index a7c0c321..9a951a1b 100644 --- a/models/cv/classification/swin_transformer_large/ixrt/README.md +++ b/models/cv/classification/swin_transformer_large/ixrt/README.md @@ -1,4 +1,4 @@ -# Swin 
Transformer Large (IxRT) +# Swin Transformer Large (ixRT) ## Model Description diff --git a/models/cv/classification/vgg16/ixrt/README.md b/models/cv/classification/vgg16/ixrt/README.md index c763274c..39b4968d 100644 --- a/models/cv/classification/vgg16/ixrt/README.md +++ b/models/cv/classification/vgg16/ixrt/README.md @@ -1,4 +1,4 @@ -# VGG16 (IxRT) +# VGG16 (ixRT) ## Model Description diff --git a/models/cv/classification/wide_resnet50/ixrt/README.md b/models/cv/classification/wide_resnet50/ixrt/README.md index 7608697f..889e3def 100644 --- a/models/cv/classification/wide_resnet50/ixrt/README.md +++ b/models/cv/classification/wide_resnet50/ixrt/README.md @@ -1,4 +1,4 @@ -# Wide ResNet50 (IxRT) +# Wide ResNet50 (ixRT) ## Model Description diff --git a/models/cv/face_recognition/facenet/ixrt/README.md b/models/cv/face_recognition/facenet/ixrt/README.md index c44213ad..e9325571 100644 --- a/models/cv/face_recognition/facenet/ixrt/README.md +++ b/models/cv/face_recognition/facenet/ixrt/README.md @@ -1,4 +1,4 @@ -# FaceNet (IxRT) +# FaceNet (ixRT) ## Model Description diff --git a/models/cv/instance_segmentation/mask_rcnn/ixrt/README.md b/models/cv/instance_segmentation/mask_rcnn/ixrt/README.md index 9afd0d7a..e4768d9b 100644 --- a/models/cv/instance_segmentation/mask_rcnn/ixrt/README.md +++ b/models/cv/instance_segmentation/mask_rcnn/ixrt/README.md @@ -1,4 +1,4 @@ -# Mask R-CNN (IxRT) +# Mask R-CNN (ixRT) ## Model Description diff --git a/models/cv/instance_segmentation/solov1/ixrt/README.md b/models/cv/instance_segmentation/solov1/ixrt/README.md index 9a59fda0..a3002a59 100644 --- a/models/cv/instance_segmentation/solov1/ixrt/README.md +++ b/models/cv/instance_segmentation/solov1/ixrt/README.md @@ -1,4 +1,4 @@ -# SOLOv1 (IxRT) +# SOLOv1 (ixRT) ## Model Description diff --git a/models/cv/object_detection/atss/ixrt/README.md b/models/cv/object_detection/atss/ixrt/README.md index 16808f76..88e55719 100644 --- a/models/cv/object_detection/atss/ixrt/README.md +++ b/models/cv/object_detection/atss/ixrt/README.md @@ -1,4 +1,4 @@ -# ATSS (IxRT) +# ATSS (ixRT) ## Model Description diff --git a/models/cv/object_detection/centernet/ixrt/README.md b/models/cv/object_detection/centernet/ixrt/README.md index f9c9bcf1..292ce276 100644 --- a/models/cv/object_detection/centernet/ixrt/README.md +++ b/models/cv/object_detection/centernet/ixrt/README.md @@ -1,4 +1,4 @@ -# CenterNet (IxRT) +# CenterNet (ixRT) ## Model Description diff --git a/models/cv/object_detection/detr/ixrt/README.md b/models/cv/object_detection/detr/ixrt/README.md index 2a79ab6a..2386a675 100755 --- a/models/cv/object_detection/detr/ixrt/README.md +++ b/models/cv/object_detection/detr/ixrt/README.md @@ -1,4 +1,4 @@ -# DETR (IxRT) +# DETR (ixRT) ## Model Description diff --git a/models/cv/object_detection/fcos/ixrt/README.md b/models/cv/object_detection/fcos/ixrt/README.md index c818daad..ca628784 100755 --- a/models/cv/object_detection/fcos/ixrt/README.md +++ b/models/cv/object_detection/fcos/ixrt/README.md @@ -1,4 +1,4 @@ -# FCOS (IxRT) +# FCOS (ixRT) ## Model Description diff --git a/models/cv/object_detection/foveabox/ixrt/README.md b/models/cv/object_detection/foveabox/ixrt/README.md index 1c71c519..b131f0d7 100644 --- a/models/cv/object_detection/foveabox/ixrt/README.md +++ b/models/cv/object_detection/foveabox/ixrt/README.md @@ -1,4 +1,4 @@ -# FoveaBox (IxRT) +# FoveaBox (ixRT) ## Model Description diff --git a/models/cv/object_detection/fsaf/ixrt/README.md b/models/cv/object_detection/fsaf/ixrt/README.md index b228bf91..f7093224 
100644 --- a/models/cv/object_detection/fsaf/ixrt/README.md +++ b/models/cv/object_detection/fsaf/ixrt/README.md @@ -1,4 +1,4 @@ -# FSAF (IxRT) +# FSAF (ixRT) ## Model Description diff --git a/models/cv/object_detection/hrnet/ixrt/README.md b/models/cv/object_detection/hrnet/ixrt/README.md index 14863090..93b2c5f7 100644 --- a/models/cv/object_detection/hrnet/ixrt/README.md +++ b/models/cv/object_detection/hrnet/ixrt/README.md @@ -1,4 +1,4 @@ -# HRNet (IxRT) +# HRNet (ixRT) ## Model Description diff --git a/models/cv/object_detection/paa/ixrt/README.md b/models/cv/object_detection/paa/ixrt/README.md index 97cbd9ec..4d86d44b 100644 --- a/models/cv/object_detection/paa/ixrt/README.md +++ b/models/cv/object_detection/paa/ixrt/README.md @@ -1,4 +1,4 @@ -# PAA (IxRT) +# PAA (ixRT) ## Model Description diff --git a/models/cv/object_detection/retinaface/ixrt/README.md b/models/cv/object_detection/retinaface/ixrt/README.md index 9b0bb6ca..19a42cc8 100644 --- a/models/cv/object_detection/retinaface/ixrt/README.md +++ b/models/cv/object_detection/retinaface/ixrt/README.md @@ -1,4 +1,4 @@ -# RetinaFace (IxRT) +# RetinaFace (ixRT) ## Model Description diff --git a/models/cv/object_detection/retinanet/ixrt/README.md b/models/cv/object_detection/retinanet/ixrt/README.md index b3947361..1993f3a6 100644 --- a/models/cv/object_detection/retinanet/ixrt/README.md +++ b/models/cv/object_detection/retinanet/ixrt/README.md @@ -1,4 +1,4 @@ -# RetinaNet (IxRT) +# RetinaNet (ixRT) ## Model Description diff --git a/models/cv/object_detection/yolov10/ixrt/README.md b/models/cv/object_detection/yolov10/ixrt/README.md index d1475f8b..54ccc13d 100644 --- a/models/cv/object_detection/yolov10/ixrt/README.md +++ b/models/cv/object_detection/yolov10/ixrt/README.md @@ -1,4 +1,4 @@ -# YOLOv10 (IxRT) +# YOLOv10 (ixRT) ## Model Description diff --git a/models/cv/object_detection/yolov11/ixrt/README.md b/models/cv/object_detection/yolov11/ixrt/README.md index 78faa528..1368906e 100644 --- a/models/cv/object_detection/yolov11/ixrt/README.md +++ b/models/cv/object_detection/yolov11/ixrt/README.md @@ -1,4 +1,4 @@ -# YOLOv11 (IxRT) +# YOLOv11 (ixRT) ## Model Description diff --git a/models/cv/object_detection/yolov3/ixrt/README.md b/models/cv/object_detection/yolov3/ixrt/README.md index 34b82267..f2bead09 100644 --- a/models/cv/object_detection/yolov3/ixrt/README.md +++ b/models/cv/object_detection/yolov3/ixrt/README.md @@ -1,4 +1,4 @@ -# YOLOv3 (IxRT) +# YOLOv3 (ixRT) ## Model Description diff --git a/models/cv/object_detection/yolov4/ixrt/README.md b/models/cv/object_detection/yolov4/ixrt/README.md index 2795658b..16811bb4 100644 --- a/models/cv/object_detection/yolov4/ixrt/README.md +++ b/models/cv/object_detection/yolov4/ixrt/README.md @@ -1,4 +1,4 @@ -# YOLOv4 (IxRT) +# YOLOv4 (ixRT) ## Model Description diff --git a/models/cv/object_detection/yolov5/ixrt/README.md b/models/cv/object_detection/yolov5/ixrt/README.md index 5e8cea88..94f8c00e 100644 --- a/models/cv/object_detection/yolov5/ixrt/README.md +++ b/models/cv/object_detection/yolov5/ixrt/README.md @@ -1,4 +1,4 @@ -# YOLOv5-m (IxRT) +# YOLOv5-m (ixRT) ## Model Description diff --git a/models/cv/object_detection/yolov5s/ixrt/README.md b/models/cv/object_detection/yolov5s/ixrt/README.md index b00b00cb..2c398200 100755 --- a/models/cv/object_detection/yolov5s/ixrt/README.md +++ b/models/cv/object_detection/yolov5s/ixrt/README.md @@ -1,4 +1,4 @@ -# YOLOv5s (IxRT) +# YOLOv5s (ixRT) ## Model Description diff --git a/models/cv/object_detection/yolov6/ixrt/README.md 
b/models/cv/object_detection/yolov6/ixrt/README.md index 0aa4a9d4..3ec3c1b6 100644 --- a/models/cv/object_detection/yolov6/ixrt/README.md +++ b/models/cv/object_detection/yolov6/ixrt/README.md @@ -1,4 +1,4 @@ -# YOLOv6 (IxRT) +# YOLOv6 (ixRT) ## Model Description diff --git a/models/cv/object_detection/yolov7/ixrt/README.md b/models/cv/object_detection/yolov7/ixrt/README.md index 1b9562ef..0f448dd0 100644 --- a/models/cv/object_detection/yolov7/ixrt/README.md +++ b/models/cv/object_detection/yolov7/ixrt/README.md @@ -1,4 +1,4 @@ -# YOLOv7 (IxRT) +# YOLOv7 (ixRT) ## Model Description diff --git a/models/cv/object_detection/yolov8/ixrt/README.md b/models/cv/object_detection/yolov8/ixrt/README.md index 7a54654f..753853a5 100644 --- a/models/cv/object_detection/yolov8/ixrt/README.md +++ b/models/cv/object_detection/yolov8/ixrt/README.md @@ -1,4 +1,4 @@ -# YOLOv8 (IxRT) +# YOLOv8 (ixRT) ## Model Description diff --git a/models/cv/object_detection/yolov9/ixrt/README.md b/models/cv/object_detection/yolov9/ixrt/README.md index 1229e742..f4cd7d80 100644 --- a/models/cv/object_detection/yolov9/ixrt/README.md +++ b/models/cv/object_detection/yolov9/ixrt/README.md @@ -1,4 +1,4 @@ -# YOLOv9 (IxRT) +# YOLOv9 (ixRT) ## Model Description diff --git a/models/cv/object_detection/yolox/ixrt/README.md b/models/cv/object_detection/yolox/ixrt/README.md index 8e73c40f..75f3a558 100644 --- a/models/cv/object_detection/yolox/ixrt/README.md +++ b/models/cv/object_detection/yolox/ixrt/README.md @@ -1,4 +1,4 @@ -# YOLOX (IxRT) +# YOLOX (ixRT) ## Model Description diff --git a/models/cv/pose_estimation/lightweight_openpose/ixrt/README.md b/models/cv/pose_estimation/lightweight_openpose/ixrt/README.md index 54b8579a..f6b5c621 100644 --- a/models/cv/pose_estimation/lightweight_openpose/ixrt/README.md +++ b/models/cv/pose_estimation/lightweight_openpose/ixrt/README.md @@ -1,4 +1,4 @@ -# Lightweight OpenPose (IxRT) +# Lightweight OpenPose (ixRT) ## Model Description diff --git a/models/cv/pose_estimation/rtmpose/ixrt/README.md b/models/cv/pose_estimation/rtmpose/ixrt/README.md index c7576241..2396196d 100644 --- a/models/cv/pose_estimation/rtmpose/ixrt/README.md +++ b/models/cv/pose_estimation/rtmpose/ixrt/README.md @@ -1,4 +1,4 @@ -# RTMPose (IxRT) +# RTMPose (ixRT) ## Model Description diff --git a/models/nlp/plm/albert/ixrt/README.md b/models/nlp/plm/albert/ixrt/README.md index 1248945b..e2fc2d46 100644 --- a/models/nlp/plm/albert/ixrt/README.md +++ b/models/nlp/plm/albert/ixrt/README.md @@ -1,4 +1,4 @@ -# ALBERT (IxRT) +# ALBERT (ixRT) ## Model Description diff --git a/models/nlp/plm/bert_base_squad/ixrt/README.md b/models/nlp/plm/bert_base_squad/ixrt/README.md index 0372004e..901ba727 100644 --- a/models/nlp/plm/bert_base_squad/ixrt/README.md +++ b/models/nlp/plm/bert_base_squad/ixrt/README.md @@ -1,4 +1,4 @@ -# BERT Base SQuAD (IxRT) +# BERT Base SQuAD (ixRT) ## Model Description diff --git a/models/nlp/plm/bert_large_squad/ixrt/README.md b/models/nlp/plm/bert_large_squad/ixrt/README.md index a6a90eab..dcb9f8c5 100644 --- a/models/nlp/plm/bert_large_squad/ixrt/README.md +++ b/models/nlp/plm/bert_large_squad/ixrt/README.md @@ -1,4 +1,4 @@ -# BERT Large SQuAD (IxRT) +# BERT Large SQuAD (ixRT) ## Model Description diff --git a/models/nlp/plm/deberta/ixrt/README.md b/models/nlp/plm/deberta/ixrt/README.md index df60dcb7..087e2b36 100644 --- a/models/nlp/plm/deberta/ixrt/README.md +++ b/models/nlp/plm/deberta/ixrt/README.md @@ -1,4 +1,4 @@ -# DeBERTa (IxRT) +# DeBERTa (ixRT) ## Model Description diff --git 
a/models/nlp/plm/roberta/ixrt/README.md b/models/nlp/plm/roberta/ixrt/README.md index e2341235..346f64a1 100644 --- a/models/nlp/plm/roberta/ixrt/README.md +++ b/models/nlp/plm/roberta/ixrt/README.md @@ -1,4 +1,4 @@ -# RoBERTa (IxRT) +# RoBERTa (ixRT) ## Model Description diff --git a/models/nlp/plm/roformer/ixrt/README.md b/models/nlp/plm/roformer/ixrt/README.md index bad19403..3838aa97 100644 --- a/models/nlp/plm/roformer/ixrt/README.md +++ b/models/nlp/plm/roformer/ixrt/README.md @@ -1,4 +1,4 @@ -# RoFormer (IxRT) +# RoFormer (ixRT) ## Model Description diff --git a/models/nlp/plm/videobert/ixrt/README.md b/models/nlp/plm/videobert/ixrt/README.md index ded01144..01197a1b 100644 --- a/models/nlp/plm/videobert/ixrt/README.md +++ b/models/nlp/plm/videobert/ixrt/README.md @@ -1,4 +1,4 @@ -# VideoBERT (IxRT) +# VideoBERT (ixRT) ## Model Description diff --git a/models/others/recommendation/wide_and_deep/ixrt/README.md b/models/others/recommendation/wide_and_deep/ixrt/README.md index f50911d1..19772222 100644 --- a/models/others/recommendation/wide_and_deep/ixrt/README.md +++ b/models/others/recommendation/wide_and_deep/ixrt/README.md @@ -1,4 +1,4 @@ -# Wide & Deep (IxRT) +# Wide & Deep (ixRT) ## Model Description -- Gitee From 9b5592bde8ecfa0909433f1a32c75a614bf4da74 Mon Sep 17 00:00:00 2001 From: "mingjiang.li" Date: Mon, 28 Jul 2025 11:27:20 +0800 Subject: [PATCH 3/3] sync 4.3.0 support in readme_en.md --- README.md | 6 +- README_en.md | 368 +++++++++++++++++++++++++-------------------------- 2 files changed, 187 insertions(+), 187 deletions(-) diff --git a/README.md b/README.md index 8e13d5c5..5aafc356 100644 --- a/README.md +++ b/README.md @@ -123,8 +123,8 @@ | ResNetV1D50 | FP16 | [✅](models/cv/classification/resnetv1d50/igie) | [✅](models/cv/classification/resnetv1d50/ixrt) | 4.3.0 | | | INT8 | | [✅](models/cv/classification/resnetv1d50/ixrt) | 4.3.0 | | ResNeXt50_32x4d | FP16 | [✅](models/cv/classification/resnext50_32x4d/igie) | [✅](models/cv/classification/resnext50_32x4d/ixrt) | 4.3.0 | -| ResNeXt101_64x4d | FP16 | [✅](models/cv/classification/resnext101_64x4d/igie) | [✅](models/cv/classification/resnext101_64x4d/ixrt) | 4.3.0 | -| ResNeXt101_32x8d | FP16 | [✅](models/cv/classification/resnext101_32x8d/igie) | [✅](models/cv/classification/resnext101_32x8d/ixrt) | 4.3.0 | +| ResNeXt101_64x4d | FP16 | [✅](models/cv/classification/resnext101_64x4d/igie) | [✅](models/cv/classification/resnext101_64x4d/ixrt) | 4.3.0 | +| ResNeXt101_32x8d | FP16 | [✅](models/cv/classification/resnext101_32x8d/igie) | [✅](models/cv/classification/resnext101_32x8d/ixrt) | 4.3.0 | | SEResNet50 | FP16 | [✅](models/cv/classification/se_resnet50/igie) | | 4.3.0 | | ShuffleNetV1 | FP16 | | [✅](models/cv/classification/shufflenet_v1/ixrt) | 4.3.0 | | ShuffleNetV2_x0_5 | FP16 | [✅](models/cv/classification/shufflenetv2_x0_5/igie) | [✅](models/cv/classification/shufflenetv2_x0_5/ixrt) | 4.3.0 | @@ -245,7 +245,7 @@ | InternVL2-4B | [✅](models/multimodal/vision_language_model/intern_vl/vllm) | | 4.3.0 | | LLaVA | [✅](models/multimodal/vision_language_model/llava/vllm) | | 4.3.0 | | LLaVA-Next-Video-7B | [✅](models/multimodal/vision_language_model/llava_next_video_7b/vllm) | | 4.3.0 | -| Llama-3.2 | [✅](models/multimodal/vision_language_model/llama-3.2/vllm) | | 4.3.0 | +| Llama-3.2 | [✅](models/multimodal/vision_language_model/llama-3.2/vllm) | | 4.3.0 | | MiniCPM-V 2 | [✅](models/multimodal/vision_language_model/minicpm_v/vllm) | | 4.3.0 | | Pixtral | [✅](models/multimodal/vision_language_model/pixtral/vllm) | | 
4.3.0 | diff --git a/README_en.md b/README_en.md index df00bc69..9c6acc03 100644 --- a/README_en.md +++ b/README_en.md @@ -36,27 +36,27 @@ inference to be expanded in the future. | Model | vLLM | TRT-LLM | TGI | IXUCA SDK | |-------------------------------|--------------------------------------------------------|---------------------------------------|------------------------------------|-----------| -| Baichuan2-7B | [✅](models/nlp/llm/baichuan2-7b/vllm) | | | 4.2.0 | -| ChatGLM-3-6B | [✅](models/nlp/llm/chatglm3-6b/vllm) | | | 4.2.0 | -| ChatGLM-3-6B-32K | [✅](models/nlp/llm/chatglm3-6b-32k/vllm) | | | 4.2.0 | -| DeepSeek-R1-Distill-Llama-8B | [✅](models/nlp/llm/deepseek-r1-distill-llama-8b/vllm) | | | 4.2.0 | -| DeepSeek-R1-Distill-Llama-70B | [✅](models/nlp/llm/deepseek-r1-distill-llama-70b/vllm) | | | 4.2.0 | -| DeepSeek-R1-Distill-Qwen-1.5B | [✅](models/nlp/llm/deepseek-r1-distill-qwen-1.5b/vllm) | | | 4.2.0 | -| DeepSeek-R1-Distill-Qwen-7B | [✅](models/nlp/llm/deepseek-r1-distill-qwen-7b/vllm) | | | 4.2.0 | -| DeepSeek-R1-Distill-Qwen-14B | [✅](models/nlp/llm/deepseek-r1-distill-qwen-14b/vllm) | | | 4.2.0 | -| DeepSeek-R1-Distill-Qwen-32B | [✅](models/nlp/llm/deepseek-r1-distill-qwen-32b/vllm) | | | 4.2.0 | -| Llama2-7B | [✅](models/nlp/llm/llama2-7b/vllm) | [✅](models/nlp/llm/llama2-7b/trtllm) | | 4.2.0 | -| Llama2-13B | | [✅](models/nlp/llm/llama2-13b/trtllm) | | 4.2.0 | -| Llama2-70B | | [✅](models/nlp/llm/llama2-70b/trtllm) | | 4.2.0 | -| Llama3-70B | [✅](models/nlp/llm/llama3-70b/vllm) | | | 4.2.0 | -| Qwen-7B | [✅](models/nlp/llm/qwen-7b/vllm) | | | 4.2.0 | -| Qwen1.5-7B | [✅](models/nlp/llm/qwen1.5-7b/vllm) | | [✅](models/nlp/llm/qwen1.5-7b/tgi) | 4.2.0 | -| Qwen1.5-14B | [✅](models/nlp/llm/qwen1.5-14b/vllm) | | | 4.2.0 | -| Qwen1.5-32B Chat | [✅](models/nlp/llm/qwen1.5-32b/vllm) | | | 4.2.0 | -| Qwen1.5-72B | [✅](models/nlp/llm/qwen1.5-72b/vllm) | | | 4.2.0 | -| Qwen2-7B Instruct | [✅](models/nlp/llm/qwen2-7b/vllm) | | | 4.2.0 | -| Qwen2-72B Instruct | [✅](models/nlp/llm/qwen2-72b/vllm) | | | 4.2.0 | -| StableLM2-1.6B | [✅](models/nlp/llm/stablelm/vllm) | | | 4.2.0 | +| Baichuan2-7B | [✅](models/nlp/llm/baichuan2-7b/vllm) | | | 4.3.0 | +| ChatGLM-3-6B | [✅](models/nlp/llm/chatglm3-6b/vllm) | | | 4.3.0 | +| ChatGLM-3-6B-32K | [✅](models/nlp/llm/chatglm3-6b-32k/vllm) | | | 4.3.0 | +| DeepSeek-R1-Distill-Llama-8B | [✅](models/nlp/llm/deepseek-r1-distill-llama-8b/vllm) | | | 4.3.0 | +| DeepSeek-R1-Distill-Llama-70B | [✅](models/nlp/llm/deepseek-r1-distill-llama-70b/vllm) | | | 4.3.0 | +| DeepSeek-R1-Distill-Qwen-1.5B | [✅](models/nlp/llm/deepseek-r1-distill-qwen-1.5b/vllm) | | | 4.3.0 | +| DeepSeek-R1-Distill-Qwen-7B | [✅](models/nlp/llm/deepseek-r1-distill-qwen-7b/vllm) | | | 4.3.0 | +| DeepSeek-R1-Distill-Qwen-14B | [✅](models/nlp/llm/deepseek-r1-distill-qwen-14b/vllm) | | | 4.3.0 | +| DeepSeek-R1-Distill-Qwen-32B | [✅](models/nlp/llm/deepseek-r1-distill-qwen-32b/vllm) | | | 4.3.0 | +| Llama2-7B | [✅](models/nlp/llm/llama2-7b/vllm) | [✅](models/nlp/llm/llama2-7b/trtllm) | | 4.3.0 | +| Llama2-13B | | [✅](models/nlp/llm/llama2-13b/trtllm) | | 4.3.0 | +| Llama2-70B | | [✅](models/nlp/llm/llama2-70b/trtllm) | | 4.3.0 | +| Llama3-70B | [✅](models/nlp/llm/llama3-70b/vllm) | | | 4.3.0 | +| Qwen-7B | [✅](models/nlp/llm/qwen-7b/vllm) | | | 4.3.0 | +| Qwen1.5-7B | [✅](models/nlp/llm/qwen1.5-7b/vllm) | | [✅](models/nlp/llm/qwen1.5-7b/tgi) | 4.3.0 | +| Qwen1.5-14B | [✅](models/nlp/llm/qwen1.5-14b/vllm) | | | 4.3.0 | +| Qwen1.5-32B Chat | [✅](models/nlp/llm/qwen1.5-32b/vllm) | | | 4.3.0 | 
+| Qwen1.5-72B | [✅](models/nlp/llm/qwen1.5-72b/vllm) | | | 4.3.0 | +| Qwen2-7B Instruct | [✅](models/nlp/llm/qwen2-7b/vllm) | | | 4.3.0 | +| Qwen2-72B Instruct | [✅](models/nlp/llm/qwen2-72b/vllm) | | | 4.3.0 | +| StableLM2-1.6B | [✅](models/nlp/llm/stablelm/vllm) | | | 4.3.0 | ### Computer Vision @@ -64,200 +64,200 @@ inference to be expanded in the future. | Model | Prec. | IGIE | ixRT | IXUCA SDK | |------------------------|-------|--------------------------------------------------------|-----------------------------------------------------------|-----------| -| AlexNet | FP16 | [✅](models/cv/classification/alexnet/igie) | [✅](models/cv/classification/alexnet/ixrt) | 4.2.0 | -| | INT8 | [✅](models/cv/classification/alexnet/igie) | [✅](models/cv/classification/alexnet/ixrt) | 4.2.0 | -| CLIP | FP16 | [✅](models/cv/classification/clip/igie) | | 4.2.0 | -| Conformer-B | FP16 | [✅](models/cv/classification/conformer_base/igie) | | 4.2.0 | -| ConvNeXt-Base | FP16 | [✅](models/cv/classification/convnext_base/igie) | [✅](models/cv/classification/convnext_base/ixrt) | 4.2.0 | -| ConvNext-S | FP16 | [✅](models/cv/classification/convnext_s/igie) | | 4.2.0 | -| ConvNeXt-Small | FP16 | [✅](models/cv/classification/convnext_small/igie) | [✅](models/cv/classification/convnext_small/ixrt) | 4.2.0 | -| ConvNeXt-Tiny | FP16 | [✅](models/cv/classification/convnext_tiny/igie) | | 4.2.0 | -| CSPDarkNet53 | FP16 | [✅](models/cv/classification/cspdarknet53/igie) | [✅](models/cv/classification/cspdarknet53/ixrt) | 4.2.0 | -| | INT8 | | [✅](models/cv/classification/cspdarknet53/ixrt) | 4.2.0 | -| CSPResNet50 | FP16 | [✅](models/cv/classification/cspresnet50/igie) | [✅](models/cv/classification/cspresnet50/ixrt) | 4.2.0 | -| | INT8 | | [✅](models/cv/classification/cspresnet50/ixrt) | 4.2.0 | -| CSPResNeXt50 | FP16 | [✅](models/cv/classification/cspresnext50/igie) | | 4.2.0 | -| DeiT-tiny | FP16 | [✅](models/cv/classification/deit_tiny/igie) | [✅](models/cv/classification/deit_tiny/ixrt) | 4.2.0 | -| DenseNet121 | FP16 | [✅](models/cv/classification/densenet121/igie) | [✅](models/cv/classification/densenet121/ixrt) | 4.2.0 | -| DenseNet161 | FP16 | [✅](models/cv/classification/densenet161/igie) | [✅](models/cv/classification/densenet161/ixrt) | 4.2.0 | -| DenseNet169 | FP16 | [✅](models/cv/classification/densenet169/igie) | [✅](models/cv/classification/densenet169/ixrt) | 4.2.0 | -| DenseNet201 | FP16 | [✅](models/cv/classification/densenet201/igie) | [✅](models/cv/classification/densenet201/ixrt) | 4.2.0 | -| EfficientNet-B0 | FP16 | [✅](models/cv/classification/efficientnet_b0/igie) | [✅](models/cv/classification/efficientnet_b0/ixrt) | 4.2.0 | -| | INT8 | | [✅](models/cv/classification/efficientnet_b0/ixrt) | 4.2.0 | -| EfficientNet-B1 | FP16 | [✅](models/cv/classification/efficientnet_b1/igie) | [✅](models/cv/classification/efficientnet_b1/ixrt) | 4.2.0 | -| | INT8 | | [✅](models/cv/classification/efficientnet_b1/ixrt) | 4.2.0 | -| EfficientNet-B2 | FP16 | [✅](models/cv/classification/efficientnet_b2/igie) | [✅](models/cv/classification/efficientnet_b2/ixrt) | 4.2.0 | -| EfficientNet-B3 | FP16 | [✅](models/cv/classification/efficientnet_b3/igie) | [✅](models/cv/classification/efficientnet_b3/ixrt) | 4.2.0 | -| EfficientNet-B4 | FP16 | [✅](models/cv/classification/efficientnet_b4/igie) | | 4.2.0 | -| EfficientNet-B5 | FP16 | [✅](models/cv/classification/efficientnet_b5/igie) | | 4.2.0 | -| EfficientNetV2 | FP16 | [✅](models/cv/classification/efficientnet_v2/igie) | 
[✅](models/cv/classification/efficientnet_v2/ixrt) | 4.2.0 |
-| | INT8 | | [✅](models/cv/classification/efficientnet_v2/ixrt) | 4.2.0 |
-| EfficientNetv2_rw_t | FP16 | [✅](models/cv/classification/efficientnetv2_rw_t/igie) | [✅](models/cv/classification/efficientnetv2_rw_t/ixrt) | 4.2.0 |
-| EfficientNetv2_s | FP16 | [✅](models/cv/classification/efficientnet_v2_s/igie) | [✅](models/cv/classification/efficientnet_v2_s/ixrt) | 4.2.0 |
-| GoogLeNet | FP16 | [✅](models/cv/classification/googlenet/igie) | [✅](models/cv/classification/googlenet/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/classification/googlenet/igie) | [✅](models/cv/classification/googlenet/ixrt) | 4.2.0 |
-| HRNet-W18 | FP16 | [✅](models/cv/classification/hrnet_w18/igie) | [✅](models/cv/classification/hrnet_w18/ixrt) | 4.2.0 |
-| | INT8 | | [✅](models/cv/classification/hrnet_w18/ixrt) | 4.2.0 |
-| InceptionV3 | FP16 | [✅](models/cv/classification/inception_v3/igie) | [✅](models/cv/classification/inception_v3/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/classification/inception_v3/igie) | [✅](models/cv/classification/inception_v3/ixrt) | 4.2.0 |
-| Inception-ResNet-V2 | FP16 | | [✅](models/cv/classification/inception_resnet_v2/ixrt) | 4.2.0 |
-| | INT8 | | [✅](models/cv/classification/inception_resnet_v2/ixrt) | 4.2.0 |
-| Mixer_B | FP16 | [✅](models/cv/classification/mlp_mixer_base/igie) | | 4.2.0 |
-| MNASNet0_5 | FP16 | [✅](models/cv/classification/mnasnet0_5/igie) | | 4.2.0 |
-| MNASNet0_75 | FP16 | [✅](models/cv/classification/mnasnet0_75/igie) | | 4.2.0 |
-| MNASNet1_0 | FP16 | [✅](models/cv/classification/mnasnet1_0/igie) | | 4.2.0 |
-| MobileNetV2 | FP16 | [✅](models/cv/classification/mobilenet_v2/igie) | [✅](models/cv/classification/mobilenet_v2/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/classification/mobilenet_v2/igie) | [✅](models/cv/classification/mobilenet_v2/ixrt) | 4.2.0 |
-| MobileNetV3_Large | FP16 | [✅](models/cv/classification/mobilenet_v3_large/igie) | | 4.2.0 |
-| MobileNetV3_Small | FP16 | [✅](models/cv/classification/mobilenet_v3/igie) | [✅](models/cv/classification/mobilenet_v3/ixrt) | 4.2.0 |
+| AlexNet | FP16 | [✅](models/cv/classification/alexnet/igie) | [✅](models/cv/classification/alexnet/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/classification/alexnet/igie) | [✅](models/cv/classification/alexnet/ixrt) | 4.3.0 |
+| CLIP | FP16 | [✅](models/cv/classification/clip/igie) | | 4.3.0 |
+| Conformer-B | FP16 | [✅](models/cv/classification/conformer_base/igie) | | 4.3.0 |
+| ConvNeXt-Base | FP16 | [✅](models/cv/classification/convnext_base/igie) | [✅](models/cv/classification/convnext_base/ixrt) | 4.3.0 |
+| ConvNext-S | FP16 | [✅](models/cv/classification/convnext_s/igie) | | 4.3.0 |
+| ConvNeXt-Small | FP16 | [✅](models/cv/classification/convnext_small/igie) | [✅](models/cv/classification/convnext_small/ixrt) | 4.3.0 |
+| ConvNeXt-Tiny | FP16 | [✅](models/cv/classification/convnext_tiny/igie) | | 4.3.0 |
+| CSPDarkNet53 | FP16 | [✅](models/cv/classification/cspdarknet53/igie) | [✅](models/cv/classification/cspdarknet53/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/classification/cspdarknet53/ixrt) | 4.3.0 |
+| CSPResNet50 | FP16 | [✅](models/cv/classification/cspresnet50/igie) | [✅](models/cv/classification/cspresnet50/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/classification/cspresnet50/ixrt) | 4.3.0 |
+| CSPResNeXt50 | FP16 | [✅](models/cv/classification/cspresnext50/igie) | | 4.3.0 |
+| DeiT-tiny | FP16 | [✅](models/cv/classification/deit_tiny/igie) | [✅](models/cv/classification/deit_tiny/ixrt) | 4.3.0 |
+| DenseNet121 | FP16 | [✅](models/cv/classification/densenet121/igie) | [✅](models/cv/classification/densenet121/ixrt) | 4.3.0 |
+| DenseNet161 | FP16 | [✅](models/cv/classification/densenet161/igie) | [✅](models/cv/classification/densenet161/ixrt) | 4.3.0 |
+| DenseNet169 | FP16 | [✅](models/cv/classification/densenet169/igie) | [✅](models/cv/classification/densenet169/ixrt) | 4.3.0 |
+| DenseNet201 | FP16 | [✅](models/cv/classification/densenet201/igie) | [✅](models/cv/classification/densenet201/ixrt) | 4.3.0 |
+| EfficientNet-B0 | FP16 | [✅](models/cv/classification/efficientnet_b0/igie) | [✅](models/cv/classification/efficientnet_b0/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/classification/efficientnet_b0/ixrt) | 4.3.0 |
+| EfficientNet-B1 | FP16 | [✅](models/cv/classification/efficientnet_b1/igie) | [✅](models/cv/classification/efficientnet_b1/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/classification/efficientnet_b1/ixrt) | 4.3.0 |
+| EfficientNet-B2 | FP16 | [✅](models/cv/classification/efficientnet_b2/igie) | [✅](models/cv/classification/efficientnet_b2/ixrt) | 4.3.0 |
+| EfficientNet-B3 | FP16 | [✅](models/cv/classification/efficientnet_b3/igie) | [✅](models/cv/classification/efficientnet_b3/ixrt) | 4.3.0 |
+| EfficientNet-B4 | FP16 | [✅](models/cv/classification/efficientnet_b4/igie) | | 4.3.0 |
+| EfficientNet-B5 | FP16 | [✅](models/cv/classification/efficientnet_b5/igie) | | 4.3.0 |
+| EfficientNetV2 | FP16 | [✅](models/cv/classification/efficientnet_v2/igie) | [✅](models/cv/classification/efficientnet_v2/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/classification/efficientnet_v2/ixrt) | 4.3.0 |
+| EfficientNetv2_rw_t | FP16 | [✅](models/cv/classification/efficientnetv2_rw_t/igie) | [✅](models/cv/classification/efficientnetv2_rw_t/ixrt) | 4.3.0 |
+| EfficientNetv2_s | FP16 | [✅](models/cv/classification/efficientnet_v2_s/igie) | [✅](models/cv/classification/efficientnet_v2_s/ixrt) | 4.3.0 |
+| GoogLeNet | FP16 | [✅](models/cv/classification/googlenet/igie) | [✅](models/cv/classification/googlenet/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/classification/googlenet/igie) | [✅](models/cv/classification/googlenet/ixrt) | 4.3.0 |
+| HRNet-W18 | FP16 | [✅](models/cv/classification/hrnet_w18/igie) | [✅](models/cv/classification/hrnet_w18/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/classification/hrnet_w18/ixrt) | 4.3.0 |
+| InceptionV3 | FP16 | [✅](models/cv/classification/inception_v3/igie) | [✅](models/cv/classification/inception_v3/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/classification/inception_v3/igie) | [✅](models/cv/classification/inception_v3/ixrt) | 4.3.0 |
+| Inception-ResNet-V2 | FP16 | | [✅](models/cv/classification/inception_resnet_v2/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/classification/inception_resnet_v2/ixrt) | 4.3.0 |
+| Mixer_B | FP16 | [✅](models/cv/classification/mlp_mixer_base/igie) | | 4.3.0 |
+| MNASNet0_5 | FP16 | [✅](models/cv/classification/mnasnet0_5/igie) | | 4.3.0 |
+| MNASNet0_75 | FP16 | [✅](models/cv/classification/mnasnet0_75/igie) | | 4.3.0 |
+| MNASNet1_0 | FP16 | [✅](models/cv/classification/mnasnet1_0/igie) | | 4.3.0 |
+| MobileNetV2 | FP16 | [✅](models/cv/classification/mobilenet_v2/igie) | [✅](models/cv/classification/mobilenet_v2/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/classification/mobilenet_v2/igie) | [✅](models/cv/classification/mobilenet_v2/ixrt) | 4.3.0 |
+| MobileNetV3_Large | FP16 | [✅](models/cv/classification/mobilenet_v3_large/igie) | | 4.3.0 |
+| MobileNetV3_Small | FP16 | [✅](models/cv/classification/mobilenet_v3/igie) | [✅](models/cv/classification/mobilenet_v3/ixrt) | 4.3.0 |
 | MViTv2_base | FP16 | [✅](models/cv/classification/mvitv2_base/igie) | | 4.2.0 |
-| RegNet_x_16gf | FP16 | [✅](models/cv/classification/regnet_x_16gf/igie) | | 4.2.0 |
-| RegNet_x_1_6gf | FP16 | [✅](models/cv/classification/regnet_x_1_6gf/igie) | | 4.2.0 |
-| RegNet_x_3_2gf | FP16 | [✅](models/cv/classification/regnet_x_3_2gf/igie) | | 4.2.0 |
-| RegNet_y_1_6gf | FP16 | [✅](models/cv/classification/regnet_y_1_6gf/igie) | | 4.2.0 |
-| RegNet_y_16gf | FP16 | [✅](models/cv/classification/regnet_y_16gf/igie) | | 4.2.0 |
-| RepVGG | FP16 | [✅](models/cv/classification/repvgg/igie) | [✅](models/cv/classification/repvgg/ixrt) | 4.2.0 |
-| Res2Net50 | FP16 | [✅](models/cv/classification/res2net50/igie) | [✅](models/cv/classification/res2net50/ixrt) | 4.2.0 |
-| | INT8 | | [✅](models/cv/classification/res2net50/ixrt) | 4.2.0 |
-| ResNeSt50 | FP16 | [✅](models/cv/classification/resnest50/igie) | | 4.2.0 |
-| ResNet101 | FP16 | [✅](models/cv/classification/resnet101/igie) | [✅](models/cv/classification/resnet101/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/classification/resnet101/igie) | [✅](models/cv/classification/resnet101/ixrt) | 4.2.0 |
-| ResNet152 | FP16 | [✅](models/cv/classification/resnet152/igie) | | 4.2.0 |
-| | INT8 | [✅](models/cv/classification/resnet152/igie) | | 4.2.0 |
-| ResNet18 | FP16 | [✅](models/cv/classification/resnet18/igie) | [✅](models/cv/classification/resnet18/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/classification/resnet18/igie) | [✅](models/cv/classification/resnet18/ixrt) | 4.2.0 |
-| ResNet34 | FP16 | | [✅](models/cv/classification/resnet34/ixrt) | 4.2.0 |
-| | INT8 | | [✅](models/cv/classification/resnet34/ixrt) | 4.2.0 |
-| ResNet50 | FP16 | [✅](models/cv/classification/resnet50/igie) | [✅](models/cv/classification/resnet50/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/classification/resnet50/igie) | | 4.2.0 |
-| ResNetV1D50 | FP16 | [✅](models/cv/classification/resnetv1d50/igie) | [✅](models/cv/classification/resnetv1d50/ixrt) | 4.2.0 |
-| | INT8 | | [✅](models/cv/classification/resnetv1d50/ixrt) | 4.2.0 |
-| ResNeXt50_32x4d | FP16 | [✅](models/cv/classification/resnext50_32x4d/igie) | [✅](models/cv/classification/resnext50_32x4d/ixrt) | 4.2.0 |
-| ResNeXt101_64x4d | FP16 | [✅](models/cv/classification/resnext101_64x4d/igie) | [✅](models/cv/classification/resnext101_64x4d/ixrt) | 4.2.0 |
-| ResNeXt101_32x8d | FP16 | [✅](models/cv/classification/resnext101_32x8d/igie) | [✅](models/cv/classification/resnext101_32x8d/ixrt) | 4.2.0 |
-| SEResNet50 | FP16 | [✅](models/cv/classification/se_resnet50/igie) | | 4.2.0 |
-| ShuffleNetV1 | FP16 | | [✅](models/cv/classification/shufflenet_v1/ixrt) | 4.2.0 |
-| ShuffleNetV2_x0_5 | FP16 | [✅](models/cv/classification/shufflenetv2_x0_5/igie) | [✅](models/cv/classification/shufflenetv2_x0_5/ixrt) | 4.2.0 |
-| ShuffleNetV2_x1_0 | FP16 | [✅](models/cv/classification/shufflenetv2_x1_0/igie) | [✅](models/cv/classification/shufflenetv2_x1_0/ixrt) | 4.2.0 |
-| ShuffleNetV2_x1_5 | FP16 | [✅](models/cv/classification/shufflenetv2_x1_5/igie) | [✅](models/cv/classification/shufflenetv2_x1_5/ixrt) | 4.2.0 |
-| ShuffleNetV2_x2_0 | FP16 | [✅](models/cv/classification/shufflenetv2_x2_0/igie) | [✅](models/cv/classification/shufflenetv2_x2_0/ixrt) | 4.2.0 |
-| SqueezeNet 1.0 | FP16 | [✅](models/cv/classification/squeezenet_v1_0/igie) | [✅](models/cv/classification/squeezenet_v1_0/ixrt) | 4.2.0 |
-| | INT8 | | [✅](models/cv/classification/squeezenet_v1_0/ixrt) | 4.2.0 |
-| SqueezeNet 1.1 | FP16 | [✅](models/cv/classification/squeezenet_v1_1/igie) | [✅](models/cv/classification/squeezenet_v1_1/ixrt) | 4.2.0 |
-| | INT8 | | [✅](models/cv/classification/squeezenet_v1_1/ixrt) | 4.2.0 |
-| SVT Base | FP16 | [✅](models/cv/classification/svt_base/igie) | | 4.2.0 |
-| Swin Transformer | FP16 | [✅](models/cv/classification/swin_transformer/igie) | | 4.2.0 |
-| Swin Transformer Large | FP16 | | [✅](models/cv/classification/swin_transformer_large/ixrt) | 4.2.0 |
-| Twins_PCPVT | FP16 | [✅](models/cv/classification/twins_pcpvt/igie) | | 4.2.0 |
-| VAN_B0 | FP16 | [✅](models/cv/classification/van_b0/igie) | | 4.2.0 |
-| VGG11 | FP16 | [✅](models/cv/classification/vgg11/igie) | | 4.2.0 |
-| VGG16 | FP16 | [✅](models/cv/classification/vgg16/igie) | [✅](models/cv/classification/vgg16/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/classification/vgg16/igie) | | 4.2.0 |
-| VGG19 | FP16 | [✅](models/cv/classification/vgg19/igie) | | 4.2.0 |
-| VGG19_BN | FP16 | [✅](models/cv/classification/vgg19_bn/igie) | | 4.2.0 |
-| ViT | FP16 | [✅](models/cv/classification/vit/igie) | | 4.2.0 |
-| Wide ResNet50 | FP16 | [✅](models/cv/classification/wide_resnet50/igie) | [✅](models/cv/classification/wide_resnet50/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/classification/wide_resnet50/igie) | [✅](models/cv/classification/wide_resnet50/ixrt) | 4.2.0 |
-| Wide ResNet101 | FP16 | [✅](models/cv/classification/wide_resnet101/igie) | | 4.2.0 |
+| RegNet_x_16gf | FP16 | [✅](models/cv/classification/regnet_x_16gf/igie) | | 4.3.0 |
+| RegNet_x_1_6gf | FP16 | [✅](models/cv/classification/regnet_x_1_6gf/igie) | | 4.3.0 |
+| RegNet_x_3_2gf | FP16 | [✅](models/cv/classification/regnet_x_3_2gf/igie) | | 4.3.0 |
+| RegNet_y_1_6gf | FP16 | [✅](models/cv/classification/regnet_y_1_6gf/igie) | | 4.3.0 |
+| RegNet_y_16gf | FP16 | [✅](models/cv/classification/regnet_y_16gf/igie) | | 4.3.0 |
+| RepVGG | FP16 | [✅](models/cv/classification/repvgg/igie) | [✅](models/cv/classification/repvgg/ixrt) | 4.3.0 |
+| Res2Net50 | FP16 | [✅](models/cv/classification/res2net50/igie) | [✅](models/cv/classification/res2net50/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/classification/res2net50/ixrt) | 4.3.0 |
+| ResNeSt50 | FP16 | [✅](models/cv/classification/resnest50/igie) | | 4.3.0 |
+| ResNet101 | FP16 | [✅](models/cv/classification/resnet101/igie) | [✅](models/cv/classification/resnet101/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/classification/resnet101/igie) | [✅](models/cv/classification/resnet101/ixrt) | 4.3.0 |
+| ResNet152 | FP16 | [✅](models/cv/classification/resnet152/igie) | | 4.3.0 |
+| | INT8 | [✅](models/cv/classification/resnet152/igie) | | 4.3.0 |
+| ResNet18 | FP16 | [✅](models/cv/classification/resnet18/igie) | [✅](models/cv/classification/resnet18/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/classification/resnet18/igie) | [✅](models/cv/classification/resnet18/ixrt) | 4.3.0 |
+| ResNet34 | FP16 | | [✅](models/cv/classification/resnet34/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/classification/resnet34/ixrt) | 4.3.0 |
+| ResNet50 | FP16 | [✅](models/cv/classification/resnet50/igie) | [✅](models/cv/classification/resnet50/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/classification/resnet50/igie) | | 4.3.0 |
+| ResNetV1D50 | FP16 | [✅](models/cv/classification/resnetv1d50/igie) | [✅](models/cv/classification/resnetv1d50/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/classification/resnetv1d50/ixrt) | 4.3.0 |
+| ResNeXt50_32x4d | FP16 | [✅](models/cv/classification/resnext50_32x4d/igie) | [✅](models/cv/classification/resnext50_32x4d/ixrt) | 4.3.0 |
+| ResNeXt101_64x4d | FP16 | [✅](models/cv/classification/resnext101_64x4d/igie) | [✅](models/cv/classification/resnext101_64x4d/ixrt) | 4.3.0 |
+| ResNeXt101_32x8d | FP16 | [✅](models/cv/classification/resnext101_32x8d/igie) | [✅](models/cv/classification/resnext101_32x8d/ixrt) | 4.3.0 |
+| SEResNet50 | FP16 | [✅](models/cv/classification/se_resnet50/igie) | | 4.3.0 |
+| ShuffleNetV1 | FP16 | | [✅](models/cv/classification/shufflenet_v1/ixrt) | 4.3.0 |
+| ShuffleNetV2_x0_5 | FP16 | [✅](models/cv/classification/shufflenetv2_x0_5/igie) | [✅](models/cv/classification/shufflenetv2_x0_5/ixrt) | 4.3.0 |
+| ShuffleNetV2_x1_0 | FP16 | [✅](models/cv/classification/shufflenetv2_x1_0/igie) | [✅](models/cv/classification/shufflenetv2_x1_0/ixrt) | 4.3.0 |
+| ShuffleNetV2_x1_5 | FP16 | [✅](models/cv/classification/shufflenetv2_x1_5/igie) | [✅](models/cv/classification/shufflenetv2_x1_5/ixrt) | 4.3.0 |
+| ShuffleNetV2_x2_0 | FP16 | [✅](models/cv/classification/shufflenetv2_x2_0/igie) | [✅](models/cv/classification/shufflenetv2_x2_0/ixrt) | 4.3.0 |
+| SqueezeNet 1.0 | FP16 | [✅](models/cv/classification/squeezenet_v1_0/igie) | [✅](models/cv/classification/squeezenet_v1_0/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/classification/squeezenet_v1_0/ixrt) | 4.3.0 |
+| SqueezeNet 1.1 | FP16 | [✅](models/cv/classification/squeezenet_v1_1/igie) | [✅](models/cv/classification/squeezenet_v1_1/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/classification/squeezenet_v1_1/ixrt) | 4.3.0 |
+| SVT Base | FP16 | [✅](models/cv/classification/svt_base/igie) | | 4.3.0 |
+| Swin Transformer | FP16 | [✅](models/cv/classification/swin_transformer/igie) | | 4.3.0 |
+| Swin Transformer Large | FP16 | | [✅](models/cv/classification/swin_transformer_large/ixrt) | 4.3.0 |
+| Twins_PCPVT | FP16 | [✅](models/cv/classification/twins_pcpvt/igie) | | 4.3.0 |
+| VAN_B0 | FP16 | [✅](models/cv/classification/van_b0/igie) | | 4.3.0 |
+| VGG11 | FP16 | [✅](models/cv/classification/vgg11/igie) | | 4.3.0 |
+| VGG16 | FP16 | [✅](models/cv/classification/vgg16/igie) | [✅](models/cv/classification/vgg16/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/classification/vgg16/igie) | | 4.3.0 |
+| VGG19 | FP16 | [✅](models/cv/classification/vgg19/igie) | | 4.3.0 |
+| VGG19_BN | FP16 | [✅](models/cv/classification/vgg19_bn/igie) | | 4.3.0 |
+| ViT | FP16 | [✅](models/cv/classification/vit/igie) | | 4.3.0 |
+| Wide ResNet50 | FP16 | [✅](models/cv/classification/wide_resnet50/igie) | [✅](models/cv/classification/wide_resnet50/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/classification/wide_resnet50/igie) | [✅](models/cv/classification/wide_resnet50/ixrt) | 4.3.0 |
+| Wide ResNet101 | FP16 | [✅](models/cv/classification/wide_resnet101/igie) | | 4.3.0 |

 #### Object Detection

 | Model | Prec. | IGIE | ixRT | IXUCA SDK |
 |------------|-------|-------------------------------------------------|-------------------------------------------------|-----------|
-| ATSS | FP16 | [✅](models/cv/object_detection/atss/igie) | | 4.2.0 |
-| CenterNet | FP16 | [✅](models/cv/object_detection/centernet/igie) | [✅](models/cv/object_detection/centernet/ixrt) | 4.2.0 |
-| DETR | FP16 | | [✅](models/cv/object_detection/detr/ixrt) | 4.2.0 |
-| FCOS | FP16 | [✅](models/cv/object_detection/fcos/igie) | [✅](models/cv/object_detection/fcos/ixrt) | 4.2.0 |
-| FoveaBox | FP16 | [✅](models/cv/object_detection/foveabox/igie) | [✅](models/cv/object_detection/foveabox/ixrt) | 4.2.0 |
-| FSAF | FP16 | [✅](models/cv/object_detection/fsaf/igie) | [✅](models/cv/object_detection/fsaf/ixrt) | 4.2.0 |
-| GFL | FP16 | [✅](models/cv/object_detection/gfl/igie) | | 4.2.0 |
-| HRNet | FP16 | [✅](models/cv/object_detection/hrnet/igie) | [✅](models/cv/object_detection/hrnet/ixrt) | 4.2.0 |
-| PAA | FP16 | [✅](models/cv/object_detection/paa/igie) | | 4.2.0 |
-| RetinaFace | FP16 | [✅](models/cv/object_detection/retinaface/igie) | [✅](models/cv/object_detection/retinaface/ixrt) | 4.2.0 |
-| RetinaNet | FP16 | [✅](models/cv/object_detection/retinanet/igie) | | 4.2.0 |
-| RTMDet | FP16 | [✅](models/cv/object_detection/rtmdet/igie) | | 4.2.0 |
-| SABL | FP16 | [✅](models/cv/object_detection/sabl/igie) | | 4.2.0 |
-| YOLOv3 | FP16 | [✅](models/cv/object_detection/yolov3/igie) | [✅](models/cv/object_detection/yolov3/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/object_detection/yolov3/igie) | [✅](models/cv/object_detection/yolov3/ixrt) | 4.2.0 |
-| YOLOv4 | FP16 | [✅](models/cv/object_detection/yolov4/igie) | [✅](models/cv/object_detection/yolov4/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/object_detection/yolov4/igie16) | [✅](models/cv/object_detection/yolov4/ixrt16) | 4.2.0 |
-| YOLOv5 | FP16 | [✅](models/cv/object_detection/yolov5/igie) | [✅](models/cv/object_detection/yolov5/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/object_detection/yolov5/igie) | [✅](models/cv/object_detection/yolov5/ixrt) | 4.2.0 |
-| YOLOv5s | FP16 | | [✅](models/cv/object_detection/yolov5s/ixrt) | 4.2.0 |
-| | INT8 | | [✅](models/cv/object_detection/yolov5s/ixrt) | 4.2.0 |
-| YOLOv6 | FP16 | [✅](models/cv/object_detection/yolov6/igie) | [✅](models/cv/object_detection/yolov6/ixrt) | 4.2.0 |
-| | INT8 | | [✅](models/cv/object_detection/yolov6/ixrt) | 4.2.0 |
-| YOLOv7 | FP16 | [✅](models/cv/object_detection/yolov7/igie) | [✅](models/cv/object_detection/yolov7/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/object_detection/yolov7/igie) | [✅](models/cv/object_detection/yolov7/ixrt) | 4.2.0 |
-| YOLOv8 | FP16 | [✅](models/cv/object_detection/yolov8/igie) | [✅](models/cv/object_detection/yolov8/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/object_detection/yolov8/igie) | [✅](models/cv/object_detection/yolov8/ixrt) | 4.2.0 |
-| YOLOv9 | FP16 | [✅](models/cv/object_detection/yolov9/igie) | [✅](models/cv/object_detection/yolov9/ixrt) | 4.2.0 |
-| YOLOv10 | FP16 | [✅](models/cv/object_detection/yolov10/igie) | [✅](models/cv/object_detection/yolov10/ixrt) | 4.2.0 |
-| YOLOv11 | FP16 | [✅](models/cv/object_detection/yolov11/igie) | [✅](models/cv/object_detection/yolov11/ixrt) | 4.2.0 |
-| YOLOv12 | FP16 | [✅](models/cv/object_detection/yolov12/igie) | | 4.2.0 |
-| YOLOX | FP16 | [✅](models/cv/object_detection/yolox/igie) | [✅](models/cv/object_detection/yolox/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/cv/object_detection/yolox/igie) | [✅](models/cv/object_detection/yolox/ixrt) | 4.2.0 |
+| ATSS | FP16 | [✅](models/cv/object_detection/atss/igie) | | 4.3.0 |
+| CenterNet | FP16 | [✅](models/cv/object_detection/centernet/igie) | [✅](models/cv/object_detection/centernet/ixrt) | 4.3.0 |
+| DETR | FP16 | | [✅](models/cv/object_detection/detr/ixrt) | 4.3.0 |
+| FCOS | FP16 | [✅](models/cv/object_detection/fcos/igie) | [✅](models/cv/object_detection/fcos/ixrt) | 4.3.0 |
+| FoveaBox | FP16 | [✅](models/cv/object_detection/foveabox/igie) | [✅](models/cv/object_detection/foveabox/ixrt) | 4.3.0 |
+| FSAF | FP16 | [✅](models/cv/object_detection/fsaf/igie) | [✅](models/cv/object_detection/fsaf/ixrt) | 4.3.0 |
+| GFL | FP16 | [✅](models/cv/object_detection/gfl/igie) | | 4.3.0 |
+| HRNet | FP16 | [✅](models/cv/object_detection/hrnet/igie) | [✅](models/cv/object_detection/hrnet/ixrt) | 4.3.0 |
+| PAA | FP16 | [✅](models/cv/object_detection/paa/igie) | | 4.3.0 |
+| RetinaFace | FP16 | [✅](models/cv/object_detection/retinaface/igie) | [✅](models/cv/object_detection/retinaface/ixrt) | 4.3.0 |
+| RetinaNet | FP16 | [✅](models/cv/object_detection/retinanet/igie) | | 4.3.0 |
+| RTMDet | FP16 | [✅](models/cv/object_detection/rtmdet/igie) | | 4.3.0 |
+| SABL | FP16 | [✅](models/cv/object_detection/sabl/igie) | | 4.3.0 |
+| YOLOv3 | FP16 | [✅](models/cv/object_detection/yolov3/igie) | [✅](models/cv/object_detection/yolov3/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/object_detection/yolov3/igie) | [✅](models/cv/object_detection/yolov3/ixrt) | 4.3.0 |
+| YOLOv4 | FP16 | [✅](models/cv/object_detection/yolov4/igie) | [✅](models/cv/object_detection/yolov4/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/object_detection/yolov4/igie16) | [✅](models/cv/object_detection/yolov4/ixrt16) | 4.3.0 |
+| YOLOv5 | FP16 | [✅](models/cv/object_detection/yolov5/igie) | [✅](models/cv/object_detection/yolov5/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/object_detection/yolov5/igie) | [✅](models/cv/object_detection/yolov5/ixrt) | 4.3.0 |
+| YOLOv5s | FP16 | | [✅](models/cv/object_detection/yolov5s/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/object_detection/yolov5s/ixrt) | 4.3.0 |
+| YOLOv6 | FP16 | [✅](models/cv/object_detection/yolov6/igie) | [✅](models/cv/object_detection/yolov6/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/object_detection/yolov6/ixrt) | 4.3.0 |
+| YOLOv7 | FP16 | [✅](models/cv/object_detection/yolov7/igie) | [✅](models/cv/object_detection/yolov7/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/object_detection/yolov7/igie) | [✅](models/cv/object_detection/yolov7/ixrt) | 4.3.0 |
+| YOLOv8 | FP16 | [✅](models/cv/object_detection/yolov8/igie) | [✅](models/cv/object_detection/yolov8/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/object_detection/yolov8/igie) | [✅](models/cv/object_detection/yolov8/ixrt) | 4.3.0 |
+| YOLOv9 | FP16 | [✅](models/cv/object_detection/yolov9/igie) | [✅](models/cv/object_detection/yolov9/ixrt) | 4.3.0 |
+| YOLOv10 | FP16 | [✅](models/cv/object_detection/yolov10/igie) | [✅](models/cv/object_detection/yolov10/ixrt) | 4.3.0 |
+| YOLOv11 | FP16 | [✅](models/cv/object_detection/yolov11/igie) | [✅](models/cv/object_detection/yolov11/ixrt) | 4.3.0 |
+| YOLOv12 | FP16 | [✅](models/cv/object_detection/yolov12/igie) | | 4.3.0 |
+| YOLOX | FP16 | [✅](models/cv/object_detection/yolox/igie) | [✅](models/cv/object_detection/yolox/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/cv/object_detection/yolox/igie) | [✅](models/cv/object_detection/yolox/ixrt) | 4.3.0 |

 #### Face Recognition

 | Model | Prec. | IGIE | ixRT | IXUCA SDK |
 |---------|-------|------|----------------------------------------------|-----------|
-| FaceNet | FP16 | | [✅](models/cv/face_recognition/facenet/ixrt) | 4.2.0 |
-| | INT8 | | [✅](models/cv/face_recognition/facenet/ixrt) | 4.2.0 |
+| FaceNet | FP16 | | [✅](models/cv/face_recognition/facenet/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/cv/face_recognition/facenet/ixrt) | 4.3.0 |

 #### OCR (Optical Character Recognition)

 | Model | Prec. | IGIE | IXUCA SDK |
 |---------------|-------|---------------------------------------|-----------|
-| Kie_layoutXLM | FP16 | [✅](models/cv/ocr/kie_layoutxlm/igie) | 4.2.0 |
-| SVTR | FP16 | [✅](models/cv/ocr/svtr/igie) | 4.2.0 |
+| Kie_layoutXLM | FP16 | [✅](models/cv/ocr/kie_layoutxlm/igie) | 4.3.0 |
+| SVTR | FP16 | [✅](models/cv/ocr/svtr/igie) | 4.3.0 |

 #### Pose Estimation

 | Model | Prec. | IGIE | ixRT | IXUCA SDK |
 |----------------------|-------|-----------------------------------------------|----------------------------------------------------------|-----------|
-| HRNetPose | FP16 | [✅](models/cv/pose_estimation/hrnetpose/igie) | | 4.2.0 |
-| Lightweight OpenPose | FP16 | | [✅](models/cv/pose_estimation/lightweight_openpose/ixrt) | 4.2.0 |
-| RTMPose | FP16 | [✅](models/cv/pose_estimation/rtmpose/igie) | [✅](models/cv/pose_estimation/rtmpose/ixrt) | 4.2.0 |
+| HRNetPose | FP16 | [✅](models/cv/pose_estimation/hrnetpose/igie) | | 4.3.0 |
+| Lightweight OpenPose | FP16 | | [✅](models/cv/pose_estimation/lightweight_openpose/ixrt) | 4.3.0 |
+| RTMPose | FP16 | [✅](models/cv/pose_estimation/rtmpose/igie) | [✅](models/cv/pose_estimation/rtmpose/ixrt) | 4.3.0 |

 #### Instance Segmentation

 | Model | Prec. | IGIE | ixRT | IXUCA SDK |
 |------------|-------|------|-----------------------------------------------------|-----------|
 | Mask R-CNN | FP16 | | [✅](models/cv/instance_segmentation/mask_rcnn/ixrt) | 4.2.0 |
-| SOLOv1 | FP16 | | [✅](models/cv/instance_segmentation/solov1/ixrt) | 4.2.0 |
+| SOLOv1 | FP16 | | [✅](models/cv/instance_segmentation/solov1/ixrt) | 4.3.0 |

 #### Semantic Segmentation

 | Model | Prec. | IGIE | ixRT | IXUCA SDK |
 |-------|-------|------------------------------------------------|------|-----------|
-| UNet | FP16 | [✅](models/cv/semantic_segmentation/unet/igie) | | 4.2.0 |
+| UNet | FP16 | [✅](models/cv/semantic_segmentation/unet/igie) | | 4.3.0 |

 #### Multi-Object Tracking

 | Model | Prec. | IGIE | ixRT | IXUCA SDK |
 |---------------------|-------|----------------------------------------------------|------|-----------|
-| FastReID | FP16 | [✅](models/cv/multi_object_tracking/fastreid/igie) | | 4.2.0 |
-| DeepSort | FP16 | [✅](models/cv/multi_object_tracking/deepsort/igie) | | 4.2.0 |
-| | INT8 | [✅](models/cv/multi_object_tracking/deepsort/igie) | | 4.2.0 |
-| RepNet-Vehicle-ReID | FP16 | [✅](models/cv/multi_object_tracking/repnet/igie) | | 4.2.0 |
+| FastReID | FP16 | [✅](models/cv/multi_object_tracking/fastreid/igie) | | 4.3.0 |
+| DeepSort | FP16 | [✅](models/cv/multi_object_tracking/deepsort/igie) | | 4.3.0 |
+| | INT8 | [✅](models/cv/multi_object_tracking/deepsort/igie) | | 4.3.0 |
+| RepNet-Vehicle-ReID | FP16 | [✅](models/cv/multi_object_tracking/repnet/igie) | | 4.3.0 |

 ### Multimodal

 | Model | vLLM | IxFormer | IXUCA SDK |
 |---------------------|-----------------------------------------------------------------------|------------------------------------------------------------|-----------|
-| Aria | [✅](models/multimodal/vision_language_model/aria/vllm) | | 4.2.0 |
-| Chameleon-7B | [✅](models/multimodal/vision_language_model/chameleon_7b/vllm) | | 4.2.0 |
-| CLIP | | [✅](models/multimodal/vision_language_model/clip/ixformer) | 4.2.0 |
-| Fuyu-8B | [✅](models/multimodal/vision_language_model/fuyu_8b/vllm) | | 4.2.0 |
-| H2OVL Mississippi | [✅](models/multimodal/vision_language_model/h2vol/vllm) | | 4.2.0 |
-| Idefics3 | [✅](models/multimodal/vision_language_model/idefics3/vllm) | | 4.2.0 |
-| InternVL2-4B | [✅](models/multimodal/vision_language_model/intern_vl/vllm) | | 4.2.0 |
-| LLaVA | [✅](models/multimodal/vision_language_model/llava/vllm) | | 4.2.0 |
-| LLaVA-Next-Video-7B | [✅](models/multimodal/vision_language_model/llava_next_video_7b/vllm) | | 4.2.0 |
-| Llama-3.2 | [✅](models/multimodal/vision_language_model/llama-3.2/vllm) | | 4.2.0 |
-| MiniCPM-V 2 | [✅](models/multimodal/vision_language_model/minicpm_v/vllm) | | 4.2.0 |
-| Pixtral | [✅](models/multimodal/vision_language_model/pixtral/vllm) | | 4.2.0 |
+| Aria | [✅](models/multimodal/vision_language_model/aria/vllm) | | 4.3.0 |
+| Chameleon-7B | [✅](models/multimodal/vision_language_model/chameleon_7b/vllm) | | 4.3.0 |
+| CLIP | | [✅](models/multimodal/vision_language_model/clip/ixformer) | 4.3.0 |
+| Fuyu-8B | [✅](models/multimodal/vision_language_model/fuyu_8b/vllm) | | 4.3.0 |
+| H2OVL Mississippi | [✅](models/multimodal/vision_language_model/h2vol/vllm) | | 4.3.0 |
+| Idefics3 | [✅](models/multimodal/vision_language_model/idefics3/vllm) | | 4.3.0 |
+| InternVL2-4B | [✅](models/multimodal/vision_language_model/intern_vl/vllm) | | 4.3.0 |
+| LLaVA | [✅](models/multimodal/vision_language_model/llava/vllm) | | 4.3.0 |
+| LLaVA-Next-Video-7B | [✅](models/multimodal/vision_language_model/llava_next_video_7b/vllm) | | 4.3.0 |
+| Llama-3.2 | [✅](models/multimodal/vision_language_model/llama-3.2/vllm) | | 4.3.0 |
+| MiniCPM-V 2 | [✅](models/multimodal/vision_language_model/minicpm_v/vllm) | | 4.3.0 |
+| Pixtral | [✅](models/multimodal/vision_language_model/pixtral/vllm) | | 4.3.0 |

 ### NLP
@@ -265,15 +265,15 @@ inference to be expanded in the future.

 | Model | Prec. | IGIE | ixRT | IXUCA SDK |
 |------------------|-------|-------------------------------------------|-------------------------------------------|-----------|
-| ALBERT | FP16 | | [✅](models/nlp/plm/albert/ixrt) | 4.2.0 |
-| BERT Base NER | INT8 | [✅](models/nlp/plm/bert_base_ner/igie) | | 4.2.0 |
-| BERT Base SQuAD | FP16 | [✅](models/nlp/plm/bert_base_squad/igie) | [✅](models/nlp/plm/bert_base_squad/ixrt) | 4.2.0 |
-| | INT8 | | [✅](models/nlp/plm/bert_base_squad/ixrt) | 4.2.0 |
-| BERT Large SQuAD | FP16 | [✅](models/nlp/plm/bert_large_squad/igie) | [✅](models/nlp/plm/bert_large_squad/ixrt) | 4.2.0 |
-| | INT8 | [✅](models/nlp/plm/bert_large_squad/igie) | [✅](models/nlp/plm/bert_large_squad/ixrt) | 4.2.0 |
-| DeBERTa | FP16 | | [✅](models/nlp/plm/deberta/ixrt) | 4.2.0 |
-| RoBERTa | FP16 | | [✅](models/nlp/plm/roberta/ixrt) | 4.2.0 |
-| RoFormer | FP16 | | [✅](models/nlp/plm/roformer/ixrt) | 4.2.0 |
+| ALBERT | FP16 | | [✅](models/nlp/plm/albert/ixrt) | 4.3.0 |
+| BERT Base NER | INT8 | [✅](models/nlp/plm/bert_base_ner/igie) | | 4.3.0 |
+| BERT Base SQuAD | FP16 | [✅](models/nlp/plm/bert_base_squad/igie) | [✅](models/nlp/plm/bert_base_squad/ixrt) | 4.3.0 |
+| | INT8 | | [✅](models/nlp/plm/bert_base_squad/ixrt) | 4.3.0 |
+| BERT Large SQuAD | FP16 | [✅](models/nlp/plm/bert_large_squad/igie) | [✅](models/nlp/plm/bert_large_squad/ixrt) | 4.3.0 |
+| | INT8 | [✅](models/nlp/plm/bert_large_squad/igie) | [✅](models/nlp/plm/bert_large_squad/ixrt) | 4.3.0 |
+| DeBERTa | FP16 | | [✅](models/nlp/plm/deberta/ixrt) | 4.3.0 |
+| RoBERTa | FP16 | | [✅](models/nlp/plm/roberta/ixrt) | 4.3.0 |
+| RoFormer | FP16 | | [✅](models/nlp/plm/roformer/ixrt) | 4.3.0 |
 | VideoBERT | FP16 | | [✅](models/nlp/plm/videobert/ixrt) | 4.2.0 |

 ### Audio
@@ -282,7 +282,7 @@ inference to be expanded in the future.

 | Model | Prec. | IGIE | ixRT | IXUCA SDK |
 |-----------------|-------|-----------------------------------------------------|-----------------------------------------------------------|-----------|
-| Conformer | FP16 | [✅](models/audio/speech_recognition/conformer/igie) | [✅](models/audio/speech_recognition/conformer/ixrt) | 4.2.0 |
+| Conformer | FP16 | [✅](models/audio/speech_recognition/conformer/igie) | [✅](models/audio/speech_recognition/conformer/ixrt) | 4.3.0 |
 | Transformer ASR | FP16 | | [✅](models/audio/speech_recognition/transformer_asr/ixrt) | 4.2.0 |

 ### Others
@@ -291,7 +291,7 @@ inference to be expanded in the future.

 | Model | Prec. | IGIE | ixRT | IXUCA SDK |
 |-------------|-------|------|------------------------------------------------------|-----------|
-| Wide & Deep | FP16 | | [✅](models/others/recommendation/wide_and_deep/ixrt) | 4.2.0 |
+| Wide & Deep | FP16 | | [✅](models/others/recommendation/wide_and_deep/ixrt) | 4.3.0 |

 ---
--
Gitee