From dd33a7512105dcd0d452faa98b2f26108a60c208 Mon Sep 17 00:00:00 2001
From: sunzhe1996328 <545249803@qq.com>
Date: Mon, 28 Mar 2022 11:29:35 +0800
Subject: [PATCH] =?UTF-8?q?=E6=B7=BB=E5=8A=A0=E7=B2=BE=E5=BA=A6=E5=92=8C?=
=?UTF-8?q?=E6=80=A7=E8=83=BD=E6=95=B0=E6=8D=AE=E4=BB=A5=E5=8F=8AACL?=
=?UTF-8?q?=E9=83=A8=E5=88=86?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../cv/BlitzNet_ID0948_for_ACL/LICENSE | 284 ++++++++++++++++++
.../cv/BlitzNet_ID0948_for_ACL/README.md | 70 +++++
.../BlitzNet_ID0948_for_ACL/freeze_graph.py | 90 ++++++
.../cv/BlitzNet_ID0948_for_ACL/imageToBin.py | 70 +++++
.../modelzoo_level.txt | 6 +
.../BlitzNet_ID0948_for_ACL/requirements.txt | 0
.../testBliznetPb_OM_Data.py | 233 ++++++++++++++
.../BlitzNet_ID0948_for_TensorFlow/README.md | 61 +++-
.../freeze_graph.py | 90 ++++++
.../testBliznetPb_OM_Data.py | 233 ++++++++++++++
10 files changed, 1130 insertions(+), 7 deletions(-)
create mode 100644 ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/LICENSE
create mode 100644 ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/README.md
create mode 100644 ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/freeze_graph.py
create mode 100644 ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/imageToBin.py
create mode 100644 ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/modelzoo_level.txt
create mode 100644 ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/requirements.txt
create mode 100644 ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/testBliznetPb_OM_Data.py
create mode 100644 TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/freeze_graph.py
create mode 100644 TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/testBliznetPb_OM_Data.py
diff --git a/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/LICENSE b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/LICENSE
new file mode 100644
index 000000000..8389e23f5
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/LICENSE
@@ -0,0 +1,284 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+------------------
+Files: third_party/compute_library/...
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+------------------
+Files: ACKNOWLEDGEMENTS
+LICENSE
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+ ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+------------------
+Files: third_party/hexagon
+
+Copyright (c) 2016-2019, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted (subject to the limitations in the
+disclaimer below) provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
+GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
+HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
+GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
+IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/README.md b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/README.md
new file mode 100644
index 000000000..c07aedecf
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/README.md
@@ -0,0 +1,71 @@
+
+## 推理过程
+环境
+- Tensorflow 1.15
+- python 3.7
+
+1. ckpt文件
+
+- ckpt文件下载地址:
+
+ https://sharegua.obs.cn-north-4.myhuaweicloud.com:443/checkpoint65.zip?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1667698491&Signature=Ltfv5%2B5VbaFSklW3pI6W6oTh73A%3D
+
+ 通过freeze_graph.py转换成pb文件bliznet_tf_310.pb
+
+- pb文件下载地址:
+
+ https://sharegua.obs.myhuaweicloud.com:443/bliznet_tf_310.pb?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1667656586&Signature=JhBRfk5dpeDFE%2BPy1jQg6Q4mvHY%3D
+
+2. om模型
+
+- om模型下载地址:
+
+ https://sharegua.obs.myhuaweicloud.com:443/bliznet_tf_310.om?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1667656644&Signature=Z7DyzKRGPd27pYipfD2Ke/KSGAo%3D
+
+ 使用ATC模型转换工具进行模型转换时可以参考如下指令:
+
+```
+atc --model=/home/HwHiAiUser/atc/bliznet_tf_310.pb --framework=3 --output=/home/HwHiAiUser/atc/bliznet_tf_310 --soc_version=Ascend310 \
+ --input_shape="input:1,300,300,3" \
+ --log=info \
+ --out_nodes="concat_1:0;concat_2:0;ssd_2/Conv_7/BiasAdd:0"
+```
+
+3. 使用msame工具推理
+
+ 参考 https://gitee.com/ascend/tools/tree/master/msame, 获取msame推理工具及使用方法。
+
+ 获取到msame可执行文件之后,将待检测om文件放在model文件夹,然后进行性能测试。
+
+ msame推理可以参考如下指令:
+```
+./msame --model "/home/HwHiAiUser/msame/bliznet_tf_310.om" --input "/home/HwHiAiUser/msame/data" --output "/home/HwHiAiUser/msame/out/" --outfmt TXT
+```
+- 将测试集数据转为bin文件:
+```
+ imageToBin.py
+```
+
+- 测试数据bin文件下载地址:
+
+ https://sharegua.obs.cn-north-4.myhuaweicloud.com:443/img.zip?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1667698452&Signature=f3aLaUdPnodF8PKtCaI5Ox4wb6c%3D
+
+
+4. 性能测试
+
+ 使用testBliznetPb_OM_Data.py对推理完成后获得的txt文件进行测试
+
+
+精度测试
+
+训练集:VOC12 train-seg-aug
+
+测试集:VOC12 val
+
+| | mIoU | mAP |
+| ---------- | -------- | -------- |
+| 论文精度 | 72.8 | 80.0 |
+| GPU精度32 | 72.8 | 80.0 |
+| GPU精度16 | 72.0 | 78.3 |
+| NPU精度 | 70.1 | 77.6 |
+| 推理精度 | 70.1 | 77.6 |
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/freeze_graph.py b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/freeze_graph.py
new file mode 100644
index 000000000..775d0e09f
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/freeze_graph.py
@@ -0,0 +1,90 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+import tensorflow as tf
+from tensorflow.python.tools import freeze_graph
+import os
+from Train.config import args
+from help_modelarts import modelarts_result2obs
+
+from Train.resnet import ResNet
+from Train.config import config as net_config
+
+INIT_CKPT_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'checkpoint65')
+ckpt_path = os.path.join(INIT_CKPT_DIR, 'model.ckpt-65000')
+
+def main():
+ print("start ckpt To pb")
+ print("ckpt_path")
+ tf.reset_default_graph()
+ img_ph = tf.placeholder(tf.float32, shape=[1, 300, 300, 3], name="input")
+ dataset_num_classes = 21
+
+ net = ResNet
+ depth = 50
+ net = net(config=net_config, depth=depth, training=False)
+
+ net.create_trunk(img_ph)
+
+ if args.detect:
+ net.create_multibox_head(dataset_num_classes)
+ confidence = net.outputs['confidence']
+ location = net.outputs['location']
+ else:
+ location, confidence = None, None
+
+ if args.segment:
+ net.create_segmentation_head(dataset_num_classes)
+ seg_logits = net.outputs['segmentation']
+ else:
+ seg_logits = None
+
+ print("confidence = ", confidence)
+ print("location = ", location)
+ print("seg_logits = ", seg_logits)
+
+ with tf.Session() as sess:
+ tf.train.write_graph(sess.graph_def, args.result_dir, 'model.pb')
+ modelarts_result2obs(args)
+ freeze_graph.freeze_graph(
+ input_graph=os.path.join(args.result_dir, 'model.pb'),
+ input_saver='',
+ input_binary=False,
+ input_checkpoint=ckpt_path,
+ output_node_names="concat_1, concat_2, ssd_2/Conv_7/BiasAdd", # graph outputs node
+ restore_op_name='save/restore_all',
+ filename_tensor_name='save/Const:0',
+ output_graph=os.path.join(args.result_dir, 'bliznet_tf_310.pb'), # graph outputs name
+ clear_devices=False,
+ initializer_nodes="")
+ print("done")
+
+ modelarts_result2obs(args)
+
+if __name__ == '__main__':
+ main()
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/imageToBin.py b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/imageToBin.py
new file mode 100644
index 000000000..23635dd95
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/imageToBin.py
@@ -0,0 +1,70 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import tensorflow as tf
+from config import args
+from getData.voc_loader import VOCLoader
+
+import progressbar
+import logging
+log = logging.getLogger()
+import numpy as np
+
+def main(argv=None):
+ if args.dataset == 'voc07' or args.dataset == 'voc07+12':
+ loader = VOCLoader('07', 'test')
+ if args.dataset == 'voc12-val':
+ loader = VOCLoader('12', 'val', segmentation=args.segment)
+
+ filenames = loader.get_filenames()
+ image_list = []
+
+ inputs = tf.placeholder(tf.float32, shape=[None, None, 3], name="input")
+ img_ph = tf.image.resize_bilinear(tf.expand_dims(inputs, 0), (300, 300))# 增加一维,并reshape
+
+ with tf.Session(config=tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)) as sess0:
+ bar = progressbar.ProgressBar()# 显示进度条
+ for i in bar(range(len(filenames))):
+ name = filenames[i]
+ img = loader.load_image(name) # 获取图片
+ image = sess0.run(img_ph, feed_dict={inputs: img})
+
+ image_list.append(image)
+ gt_bboxes, seg_gt, gt_cats, w, h, difficulty = loader.read_annotations(name) # 获取图片信息
+ image.tofile("./binFile/img/{0:05d}.bin".format(i))
+ # im = np.fromfile("./binFile/img/{0:05d}.bin".format(i), dtype=np.float32)
+ # print(im)
+ gt_bboxes.tofile("./binFile/gt_bboxes/{0:05d}.bin".format(i))
+ seg_gt.tofile("./binFile/seg_gt/{0:05d}.bin".format(i))
+ gt_cats.tofile("./binFile/gt_cats/{0:05d}.bin".format(i))
+ # w.tofile("./binFile/w/{0:05d}.bin".format(i))
+ # h.tofile("./binFile/h/{0:05d}.bin".format(i))
+ difficulty.tofile("./binFile/difficulty/{0:05d}.bin".format(i))
+
+
+if __name__ == '__main__':
+ tf.app.run()
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/modelzoo_level.txt b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/modelzoo_level.txt
new file mode 100644
index 000000000..1badb843c
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/modelzoo_level.txt
@@ -0,0 +1,6 @@
+FuncStatus:OK
+PrecisionStatus:POK
+AutoTune:POK
+PerfStatus:POK
+ModelConvert:OK
+QuantStatus:OK
\ No newline at end of file
diff --git a/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/requirements.txt b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/requirements.txt
new file mode 100644
index 000000000..e69de29bb
diff --git a/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/testBliznetPb_OM_Data.py b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/testBliznetPb_OM_Data.py
new file mode 100644
index 000000000..9b9564af6
--- /dev/null
+++ b/ACL_TensorFlow/contrib/cv/BlitzNet_ID0948_for_ACL/testBliznetPb_OM_Data.py
@@ -0,0 +1,233 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import tensorflow as tf
+from config import args
+from getData.voc_loader import VOCLoader
+
+from tensorflow.python.ops.metrics_impl import mean_iou as streaming_mean_iou
+from utils import decode_bboxes
+from getData.boxer import PriorBoxGrid
+from config import config as net_config
+from detector import Detector
+from tabulate import tabulate
+import progressbar
+import numpy as np
+import logging
+log = logging.getLogger()
+
+def eval_category(gt, dets, cid):
+ """Computes average precision for one category"""
+ cgt = gt[cid]
+ cdets = np.array(dets[cid])
+ if (cdets.shape == (0, )):
+ return None, None
+ scores = cdets[:, 1]
+ sorted_inds = np.argsort(-scores)
+ image_ids = cdets[sorted_inds, 0].astype(int)
+ BB = cdets[sorted_inds]
+
+ npos = 0
+ for img_gt in cgt.values():
+ img_gt['ignored'] = np.array(img_gt['difficult'])
+ img_gt['det'] = np.zeros(len(img_gt['difficult']), dtype=np.bool)
+ npos += np.sum(~img_gt['ignored'])
+
+ nd = len(image_ids)
+ tp = np.zeros(nd)
+ fp = np.zeros(nd)
+ for d in range(nd):
+ ovmax = -np.inf
+ if image_ids[d] in cgt:
+ R = cgt[image_ids[d]]
+ bb = BB[d, 2:].astype(float)
+
+ BBGT = R['bbox'].astype(float)
+
+ # compute overlaps
+ # intersection
+ ixmin = np.maximum(BBGT[:, 0], bb[0])
+ iymin = np.maximum(BBGT[:, 1], bb[1])
+ ixmax = np.minimum(BBGT[:, 0] + BBGT[:, 2], bb[0] + bb[2])
+ iymax = np.minimum(BBGT[:, 1] + BBGT[:, 3], bb[1] + bb[3])
+ iw = np.maximum(ixmax - ixmin, 0.)
+ ih = np.maximum(iymax - iymin, 0.)
+ inters = iw * ih
+
+ # union
+ uni = (bb[2] * bb[3] + BBGT[:, 2] * BBGT[:, 3] - inters)
+
+ overlaps = inters / uni
+ ovmax = np.max(overlaps)
+ jmax = np.argmax(overlaps)
+
+ if ovmax > args.voc_iou_thresh:
+ if not R['ignored'][jmax]:
+ if not R['det'][jmax]:
+ tp[d] = 1.
+ R['det'][jmax] = True
+ else:
+ fp[d] = 1.
+ else:
+ fp[d] = 1.
+
+ # compute precision recall
+ fp = np.cumsum(fp)
+ tp = np.cumsum(tp)
+ rec = tp / float(npos)
+ N = float(npos)
+ # avoid divide by zero in case the first detection matches a difficult
+ # ground truth
+ prec = rec * N / np.maximum(rec * N + fp, np.finfo(np.float32).eps)
+ return rec, prec
+
+def voc_ap(rec, prec, use_07_metric=False):
+ """ ap = voc_ap(rec, prec, [use_07_metric])
+ Compute VOC AP given precision and recall.
+ If use_07_metric is true, uses the
+ VOC 07 11 point method (default:False).
+ """
+ if use_07_metric:
+ # 11 point metric
+ ap = 0.
+ for t in np.arange(0., 1.1, 0.1):
+ p = 0 if np.sum(rec >= t) == 0 else np.max(prec[rec >= t])
+ ap = ap + p / 11.
+ else:
+ # correct AP calculation
+ # first append sentinel values at the end
+ mrec = np.concatenate(([0.], rec, [1.]))
+ mpre = np.concatenate(([0.], prec, [0.]))
+
+ # compute the precision envelope
+ for i in range(mpre.size - 1, 0, -1):
+ mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
+
+ # to calculate area under PR curve, look for points
+ # where X axis (recall) changes value
+ i = np.where(mrec[1:] != mrec[:-1])[0]
+
+ # and sum (\Delta recall) * prec
+ ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
+ return ap
+
+def compute_ap(gt, dets, loader):
+ """computes average precision for all categories"""
+ aps = {}
+ for cid in range(1, loader.num_classes):
+ cat_name = loader.ids_to_cats[cid]
+ rec, prec = eval_category(gt, dets, cid)
+ ap = voc_ap(rec, prec, loader.year == '07')
+ aps[loader.ids_to_cats[cid]] = ap
+ return aps
+
+def make_detection_table(gt, dets, loader):
+ """creates a table with AP per category and mean AP"""
+ aps = compute_ap(gt, dets, loader)
+ print("ap = ", aps)
+ eval_cache = [aps]
+
+ table = []
+ for cid in range(1, loader.num_classes):
+ cat_name = loader.ids_to_cats[cid]
+ table.append((cat_name, ) + tuple(aps.get(cat_name, 'N/A') for aps in eval_cache))
+ mean_ap = np.mean([a for a in list(aps.values()) if a >= 0])
+ table.append(("AVERAGE", ) + tuple(np.mean(list(aps.values())) for aps in eval_cache))
+ x = tabulate(table, headers=(["Category", "mAP (all)"]),
+ tablefmt='orgtbl', floatfmt=".3f")
+ log.info("Eval results:\n%s", x)
+ return table
+
+def compute_mean_iou(detector):
+ iou = detector.get_mean_iou()
+ print(iou)
+ log.info("\n Mean IoU is %f", iou)
+ return iou
+
+def main(argv=None):
+ if args.dataset == 'voc07' or args.dataset == 'voc07+12':
+ loader = VOCLoader('07', 'test')
+ if args.dataset == 'voc12-val':
+ loader = VOCLoader('12', 'val', segmentation=args.segment)
+
+ with tf.Session(config=tf.ConfigProto(allow_soft_placement=True,
+ log_device_placement=False)) as sess:
+ detector = Detector(sess, loader, net_config, no_gt=args.no_seg_gt)
+
+ filenames = loader.get_filenames()
+ gt = {cid: {} for cid in range(1, loader.num_classes)}
+ dets = {cid: [] for cid in range(1, loader.num_classes)}
+
+ bar = progressbar.ProgressBar()# 显示进度条
+ # print("filenames = ", filenames)
+
+ init_op = tf.group(tf.local_variables_initializer(), tf.global_variables_initializer())
+ sess.run(init_op)
+ for i in bar(range(len(filenames))):
+ name = filenames[i]
+ # print("name = ", name)
+ img_id = i
+ img = loader.load_image(name) # 获取图片
+ # img = np.fromfile("./binFile/img/{0:05d}.bin".format(i), dtype=np.float32)
+ # img.shape = 1, 300, 300, 3
+ gt_bboxes, seg_gt, gt_cats, w, h, difficulty = loader.read_annotations(name) # 获取图片信息
+
+ confidence = np.loadtxt("./binFile/test/2021118_18_51_25_234650/{0:05d}_output_0.txt".format(i))
+ location = np.loadtxt("./binFile/test/2021118_18_51_25_234650/{0:05d}_output_1.txt".format(i))
+ seg_logits = np.loadtxt("./binFile/test/2021118_18_51_25_234650/{0:05d}_output_2.txt".format(i))
+ confidence.shape = 1, 45390, 21
+ location.shape = 1, 45390, 4
+ seg_logits.shape = 1, 75, 75, 21
+
+ for cid in np.unique(gt_cats):
+ mask = (gt_cats == cid)
+ bbox = gt_bboxes[mask]
+ diff = difficulty[mask]
+ det = np.zeros(len(diff), dtype=np.bool)
+ gt[cid][img_id] = {'bbox': bbox, 'difficult': diff, 'det': det}
+
+ confidence1 = confidence
+ location1 = location
+ seg_logits1 = seg_logits
+ output = detector.feed_forward(img, seg_gt, confidence1, location1, seg_logits1,
+ w, h, name, gt_bboxes, gt_cats) # result
+
+ if args.detect:
+ det_bboxes, det_probs, det_cats = output[:3]
+ for i in range(len(det_cats)):
+ dets[det_cats[i]].append((img_id, det_probs[i]) + tuple(det_bboxes[i]))
+
+ # print("gt = ", gt)
+ # print("dets = ", dets)
+ print("table result:")
+ table = make_detection_table(gt, dets, loader) if args.detect else None
+ print("iou result:")
+ iou = compute_mean_iou(detector) if args.segment else None
+
+
+if __name__ == '__main__':
+ tf.app.run()
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/README.md b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/README.md
index 4e3f0e500..f51769bdd 100644
--- a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/README.md
+++ b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/README.md
@@ -118,10 +118,11 @@ BlitzNet在一次前向传递中联合执行对象检测和语义分割,从而
快速上手
-- 数据集准备
-
+- 训练数据集准备
+
OBS下载地址:(下载的数据集为处理完的tf数据集)
https://blitznets.obs.myhuaweicloud.com:443/Datasets/voc12-train-seg?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1661686224&Signature=QkWct66ZOwIUfNOYeoWFFZ/FTsk%3D
+
- ResNet预训练模型准备
OBS下载地址:(将下载的resnet50_full.ckpt文件置于Weights_imagenet中)
@@ -198,6 +199,51 @@ data_input_test.py
2. 参考脚本的模型存储路径为test/output/*,训练脚本train_*.log中可查看性能、精度的相关运行状态。
+## 推理过程
+1. ckpt文件
+
+- ckpt文件下载地址:
+
+ https://sharegua.obs.cn-north-4.myhuaweicloud.com:443/checkpoint65.zip?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1667698491&Signature=Ltfv5%2B5VbaFSklW3pI6W6oTh73A%3D
+
+ 通过freeze_graph.py转换成pb文件bliznet_tf_310.pb
+
+- pb文件下载地址:
+
+ https://sharegua.obs.myhuaweicloud.com:443/bliznet_tf_310.pb?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1667656586&Signature=JhBRfk5dpeDFE%2BPy1jQg6Q4mvHY%3D
+
+2. om模型
+
+- om模型下载地址:
+
+ https://sharegua.obs.myhuaweicloud.com:443/bliznet_tf_310.om?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1667656644&Signature=Z7DyzKRGPd27pYipfD2Ke/KSGAo%3D
+
+ 使用ATC模型转换工具进行模型转换时可以参考如下指令:
+
+```
+atc --model=/home/HwHiAiUser/atc/bliznet_tf_310.pb --framework=3 --output=/home/HwHiAiUser/atc/bliznet_tf_310 --soc_version=Ascend310 \
+ --input_shape="input:1,300,300,3" \
+ --log=info \
+ --out_nodes="concat_1:0;concat_2:0;ssd_2/Conv_7/BiasAdd:0"
+```
+
+3. 使用msame工具推理
+
+ 参考 https://gitee.com/ascend/tools/tree/master/msame, 获取msame推理工具及使用方法。
+
+ 获取到msame可执行文件之后,将待检测om文件放在model文件夹,然后进行性能测试。
+
+ msame推理可以参考如下指令:
+```
+./msame --model "/home/HwHiAiUser/msame/bliznet_tf_310.om" --input "/home/HwHiAiUser/msame/data" --output "/home/HwHiAiUser/msame/out/" --outfmt TXT
+```
+- 测试数据bin文件下载地址:
+
+ https://sharegua.obs.cn-north-4.myhuaweicloud.com:443/img.zip?AccessKeyId=UC40X3U4Z2RUPSTV8ADH&Expires=1667698452&Signature=f3aLaUdPnodF8PKtCaI5Ox4wb6c%3D
+
+4. 性能测试
+
+   使用 testBliznetPb_OM_Data.py 对 msame 推理完成后输出的 txt 文件进行测试,得到精度(mAP、mIoU)结果
精度测试
@@ -205,8 +251,9 @@ data_input_test.py
测试集:VOC12 val
-| | mIoU | mAP |
-| ---------- | -------- | -------- |
-| 论文精度 | 72.8 | 80.0 |
-| GPU精度 | 72.8 | 80.0 |
-| NPU精度 | 待测 | 待测 |
\ No newline at end of file
+| | mIoU | mAP | 性能|
+| ---------- | -------- | -------- | -------- |
+| 论文精度 | 72.8 | 80.0 | / |
+| GPU精度32 | 72.8 | 80.0 | 0.35 sec/batch |
+| GPU精度16 | 72.0 | 78.3 | 0.35 sec/batch |
+| NPU精度 | 70.1 | 77.6 | 0.5 sec/batch |
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/freeze_graph.py b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/freeze_graph.py
new file mode 100644
index 000000000..775d0e09f
--- /dev/null
+++ b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/freeze_graph.py
@@ -0,0 +1,90 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from npu_bridge.npu_init import *
+import tensorflow as tf
+from tensorflow.python.tools import freeze_graph
+import os
+from Train.config import args
+from help_modelarts import modelarts_result2obs
+
+from Train.resnet import ResNet
+from Train.config import config as net_config
+
# Directory holding the trained BlitzNet checkpoint, resolved relative to this file.
INIT_CKPT_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'checkpoint65')
# Checkpoint prefix written after 65k training steps; passed to freeze_graph below.
ckpt_path = os.path.join(INIT_CKPT_DIR, 'model.ckpt-65000')
+
def main():
    """Freeze the BlitzNet checkpoint into an inference pb.

    Builds the ResNet-50 based BlitzNet graph for a fixed 1x300x300x3
    input placeholder named "input", dumps the plain GraphDef, then
    freezes it together with the checkpoint weights into
    ``bliznet_tf_310.pb`` under ``args.result_dir``.
    """
    print("start ckpt To pb")
    print("ckpt_path = ", ckpt_path)
    tf.reset_default_graph()
    # Fixed-shape placeholder — must match the ATC --input_shape option.
    img_ph = tf.placeholder(tf.float32, shape=[1, 300, 300, 3], name="input")
    dataset_num_classes = 21  # 20 VOC categories + background

    net = ResNet(config=net_config, depth=50, training=False)
    net.create_trunk(img_ph)

    if args.detect:
        net.create_multibox_head(dataset_num_classes)
        confidence = net.outputs['confidence']
        location = net.outputs['location']
    else:
        location, confidence = None, None

    if args.segment:
        net.create_segmentation_head(dataset_num_classes)
        seg_logits = net.outputs['segmentation']
    else:
        seg_logits = None

    print("confidence = ", confidence)
    print("location = ", location)
    print("seg_logits = ", seg_logits)

    with tf.Session() as sess:
        tf.train.write_graph(sess.graph_def, args.result_dir, 'model.pb')
        modelarts_result2obs(args)
        # NOTE: output_node_names is split on "," by freeze_graph, so the
        # list must NOT contain spaces — "a, b" would look up a node
        # literally named " b" and fail.
        freeze_graph.freeze_graph(
            input_graph=os.path.join(args.result_dir, 'model.pb'),
            input_saver='',
            input_binary=False,
            input_checkpoint=ckpt_path,
            output_node_names="concat_1,concat_2,ssd_2/Conv_7/BiasAdd",  # graph output nodes
            restore_op_name='save/restore_all',
            filename_tensor_name='save/Const:0',
            output_graph=os.path.join(args.result_dir, 'bliznet_tf_310.pb'),  # frozen graph name
            clear_devices=False,
            initializer_nodes="")
    print("done")

    # Upload the frozen model to OBS as well.
    modelarts_result2obs(args)


if __name__ == '__main__':
    main()
\ No newline at end of file
diff --git a/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/testBliznetPb_OM_Data.py b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/testBliznetPb_OM_Data.py
new file mode 100644
index 000000000..9b9564af6
--- /dev/null
+++ b/TensorFlow/contrib/cv/BlitzNet_ID0948_for_TensorFlow/testBliznetPb_OM_Data.py
@@ -0,0 +1,233 @@
+# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================
+# Copyright 2021 Huawei Technologies Co., Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import tensorflow as tf
+from config import args
+from getData.voc_loader import VOCLoader
+
+from tensorflow.python.ops.metrics_impl import mean_iou as streaming_mean_iou
+from utils import decode_bboxes
+from getData.boxer import PriorBoxGrid
+from config import config as net_config
+from detector import Detector
+from tabulate import tabulate
+import progressbar
+import numpy as np
+import logging
+log = logging.getLogger()
+
def eval_category(gt, dets, cid):
    """Compute the precision/recall curve for one category.

    Args:
        gt: dict ``{cid: {img_id: {'bbox', 'difficult', 'det'}}}`` of
            ground-truth boxes in (x, y, w, h) format.
        dets: dict ``{cid: [(img_id, score, x, y, w, h), ...]}``.
        cid: category id to evaluate.

    Returns:
        ``(recall, precision)`` arrays ordered by descending score, or
        ``(None, None)`` when the category has no detections at all.
    """
    cgt = gt[cid]
    cdets = np.array(dets[cid])
    if (cdets.shape == (0, )):
        return None, None
    # Rank detections by descending confidence.
    scores = cdets[:, 1]
    sorted_inds = np.argsort(-scores)
    image_ids = cdets[sorted_inds, 0].astype(int)
    BB = cdets[sorted_inds]

    # Count positives, ignoring "difficult" ground truth.
    npos = 0
    for img_gt in cgt.values():
        img_gt['ignored'] = np.array(img_gt['difficult'])
        # np.bool was removed in NumPy 1.24 — use the builtin bool dtype.
        img_gt['det'] = np.zeros(len(img_gt['difficult']), dtype=bool)
        npos += np.sum(~img_gt['ignored'])

    nd = len(image_ids)
    tp = np.zeros(nd)
    fp = np.zeros(nd)
    for d in range(nd):
        ovmax = -np.inf
        if image_ids[d] in cgt:
            R = cgt[image_ids[d]]
            bb = BB[d, 2:].astype(float)

            BBGT = R['bbox'].astype(float)

            # Intersection of the detection with every GT box (x, y, w, h).
            ixmin = np.maximum(BBGT[:, 0], bb[0])
            iymin = np.maximum(BBGT[:, 1], bb[1])
            ixmax = np.minimum(BBGT[:, 0] + BBGT[:, 2], bb[0] + bb[2])
            iymax = np.minimum(BBGT[:, 1] + BBGT[:, 3], bb[1] + bb[3])
            iw = np.maximum(ixmax - ixmin, 0.)
            ih = np.maximum(iymax - iymin, 0.)
            inters = iw * ih

            # union
            uni = (bb[2] * bb[3] + BBGT[:, 2] * BBGT[:, 3] - inters)

            overlaps = inters / uni
            ovmax = np.max(overlaps)
            jmax = np.argmax(overlaps)

            if ovmax > args.voc_iou_thresh:
                if not R['ignored'][jmax]:
                    if not R['det'][jmax]:
                        tp[d] = 1.           # first match of this GT box
                        R['det'][jmax] = True
                    else:
                        fp[d] = 1.           # duplicate detection
            else:
                fp[d] = 1.                   # insufficient overlap

    # compute precision recall
    fp = np.cumsum(fp)
    tp = np.cumsum(tp)
    rec = tp / float(npos)
    N = float(npos)
    # avoid divide by zero in case the first detection matches a difficult
    # ground truth
    prec = rec * N / np.maximum(rec * N + fp, np.finfo(np.float32).eps)
    return rec, prec
+
def voc_ap(rec, prec, use_07_metric=False):
    """Compute VOC average precision from a precision/recall curve.

    When ``use_07_metric`` is true the legacy VOC2007 11-point
    interpolation is used; otherwise the exact area under the
    monotonized PR curve.
    """
    if use_07_metric:
        # Average the interpolated precision at 11 evenly spaced recalls.
        ap = 0.
        for t in np.arange(0., 1.1, 0.1):
            reachable = rec >= t
            ap = ap + (np.max(prec[reachable]) if np.sum(reachable) != 0 else 0) / 11.
        return ap

    # Pad with sentinels so the envelope/area computation spans [0, 1].
    mrec = np.concatenate(([0.], rec, [1.]))
    mpre = np.concatenate(([0.], prec, [0.]))

    # Suffix running maximum makes the precision envelope non-increasing.
    mpre = np.maximum.accumulate(mpre[::-1])[::-1]

    # Area only accrues where the recall axis actually moves.
    steps = np.where(mrec[1:] != mrec[:-1])[0]
    return np.sum((mrec[steps + 1] - mrec[steps]) * mpre[steps + 1])
+
def compute_ap(gt, dets, loader):
    """Compute average precision for every category.

    Categories for which ``eval_category`` yields no PR curve (no
    detections at all) are skipped, so they are absent from the result
    and rendered as 'N/A' by the table builder.
    """
    aps = {}
    for cid in range(1, loader.num_classes):
        cat_name = loader.ids_to_cats[cid]
        rec, prec = eval_category(gt, dets, cid)
        if rec is None:
            # No detections for this class — voc_ap cannot be computed.
            continue
        aps[cat_name] = voc_ap(rec, prec, loader.year == '07')
    return aps
+
def make_detection_table(gt, dets, loader):
    """Build, log and return a per-category AP table plus a mean-AP row."""
    aps = compute_ap(gt, dets, loader)
    print("ap = ", aps)
    eval_cache = [aps]

    table = []
    for cid in range(1, loader.num_classes):
        cat_name = loader.ids_to_cats[cid]
        # 'N/A' for categories compute_ap produced no AP for.
        table.append((cat_name, ) + tuple(cache.get(cat_name, 'N/A') for cache in eval_cache))
    table.append(("AVERAGE", ) + tuple(np.mean(list(cache.values())) for cache in eval_cache))
    x = tabulate(table, headers=(["Category", "mAP (all)"]),
                 tablefmt='orgtbl', floatfmt=".3f")
    log.info("Eval results:\n%s", x)
    return table
+
def compute_mean_iou(detector):
    """Fetch, print, log and return the detector's accumulated mean IoU."""
    mean_iou = detector.get_mean_iou()
    print(mean_iou)
    log.info("\n Mean IoU is %f", mean_iou)
    return mean_iou
+
def main(argv=None):
    """Evaluate BlitzNet om-model outputs (msame txt dumps) on VOC.

    For every validation image the pre-computed network outputs are read
    from text files, reshaped to the network's output tensor shapes, fed
    through the Detector post-processing, and finally aggregated into a
    detection AP table (``args.detect``) and/or a segmentation mean IoU
    (``args.segment``).
    """
    if args.dataset == 'voc07' or args.dataset == 'voc07+12':
        loader = VOCLoader('07', 'test')
    if args.dataset == 'voc12-val':
        loader = VOCLoader('12', 'val', segmentation=args.segment)

    with tf.Session(config=tf.ConfigProto(allow_soft_placement=True,
                                          log_device_placement=False)) as sess:
        detector = Detector(sess, loader, net_config, no_gt=args.no_seg_gt)

        filenames = loader.get_filenames()
        gt = {cid: {} for cid in range(1, loader.num_classes)}
        dets = {cid: [] for cid in range(1, loader.num_classes)}

        bar = progressbar.ProgressBar()  # progress display
        init_op = tf.group(tf.local_variables_initializer(), tf.global_variables_initializer())
        sess.run(init_op)
        for i in bar(range(len(filenames))):
            name = filenames[i]
            img_id = i
            img = loader.load_image(name)  # load the image itself
            gt_bboxes, seg_gt, gt_cats, w, h, difficulty = loader.read_annotations(name)

            # msame inference results for this image; reshape the flat txt
            # dumps back into the network's output tensor shapes.
            confidence = np.loadtxt("./binFile/test/2021118_18_51_25_234650/{0:05d}_output_0.txt".format(i))
            location = np.loadtxt("./binFile/test/2021118_18_51_25_234650/{0:05d}_output_1.txt".format(i))
            seg_logits = np.loadtxt("./binFile/test/2021118_18_51_25_234650/{0:05d}_output_2.txt".format(i))
            confidence.shape = 1, 45390, 21
            location.shape = 1, 45390, 4
            seg_logits.shape = 1, 75, 75, 21

            # Record per-category ground truth for this image.
            for cid in np.unique(gt_cats):
                mask = (gt_cats == cid)
                bbox = gt_bboxes[mask]
                diff = difficulty[mask]
                # np.bool was removed in NumPy 1.24 — use the builtin bool.
                det = np.zeros(len(diff), dtype=bool)
                gt[cid][img_id] = {'bbox': bbox, 'difficult': diff, 'det': det}

            output = detector.feed_forward(img, seg_gt, confidence, location, seg_logits,
                                           w, h, name, gt_bboxes, gt_cats)  # result

            if args.detect:
                det_bboxes, det_probs, det_cats = output[:3]
                # Separate index k so the outer image index i is not shadowed.
                for k in range(len(det_cats)):
                    dets[det_cats[k]].append((img_id, det_probs[k]) + tuple(det_bboxes[k]))

        print("table result:")
        table = make_detection_table(gt, dets, loader) if args.detect else None
        print("iou result:")
        iou = compute_mean_iou(detector) if args.segment else None


if __name__ == '__main__':
    tf.app.run()
\ No newline at end of file
--
Gitee