diff --git a/dsoftbus/build/LICENSE b/dsoftbus/build/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..c9c57b14bb849162acb56648bf2e19f849b893f2
--- /dev/null
+++ b/dsoftbus/build/LICENSE
@@ -0,0 +1,46 @@
+Copyright (c) 2021 Huawei Device Co., Ltd.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+
+***************************************************************
+The build directory is derived from the following:
+https://chromium.googlesource.com/chromium/src.git
+version 83.0.4098.1
+***************************************************************
+Copyright 2015 The Chromium Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/dsoftbus/build/OAT.xml b/dsoftbus/build/OAT.xml
new file mode 100755
index 0000000000000000000000000000000000000000..0f4ea550a959468cefbe7495ef8c25dce82d833f
--- /dev/null
+++ b/dsoftbus/build/OAT.xml
@@ -0,0 +1,147 @@
diff --git a/dsoftbus/build/README_zh.md b/dsoftbus/build/README_zh.md
new file mode 100755
index 0000000000000000000000000000000000000000..691397d88c8ec0426a4d0981333c8c8a6c4ea12f
--- /dev/null
+++ b/dsoftbus/build/README_zh.md
@@ -0,0 +1,185 @@
+# Compilation and Building
+
+- [Introduction](#section11660541593)
+- [Directory Structure](#section161941989596)
+- [Constraints](#section2029921310472)
+- [Usage](#section1312121216216)
+- [FAQs](#section131336181)
+- [Repositories Involved](#section1371113476307)
+
+## Introduction
+
+The build subsystem provides a compilation and building framework based on GN and Ninja.
+
+It generates the image packages for a given product configuration. The build process is as follows:
+
+1. Configure the build targets with GN.
+2. Running GN generates Ninja files.
+3. Run Ninja to execute the compilation tasks.
+
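+For reference, a minimal command-line sketch of these three steps (the output directory and arguments are illustrative assumptions; the build.sh script described below wraps this flow):
+
+```
+# Steps 1-2: GN evaluates the build configuration and generates Ninja files
+gn gen out/ohos-arm-release --args='is_debug=false'
+
+# Step 3: Ninja runs the compilation tasks for the requested target
+ninja -C out/ohos-arm-release images
+```
+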
+## Directory Structure
+
+```
+/build                  # Main directory of the build subsystem
+├── config              # Build-related configuration
+├── core
+│   └── gn              # BUILD.gn entry point configuration for the build
+├── loader              # Loading of part configurations and template generation
+├── ohos                # OpenHarmony build and packaging process configuration
+│   ├── kits            # Kits build and packaging templates and process
+│   ├── ndk             # NDK templates and process
+│   ├── notice          # Notice templates and process
+│   ├── packages        # Version packaging templates and process
+│   ├── sa_profile      # SA profile templates and process
+│   ├── sdk             # SDK templates and process, including the configuration of modules contained in the SDK
+│   └── testfwk         # Test-related processing
+├── scripts             # Build-related Python scripts
+├── templates           # C/C++ build templates
+└── toolchain           # Toolchain configuration
+```
+
+## Constraints
+
+- The build environment must run Ubuntu 18.04 or later.
+
+- Install the packages required for the build.
+
+  Installation command:
+
+  ```
+  sudo apt-get install binutils git-core gnupg flex bison gperf build-essential zip curl zlib1g-dev gcc-multilib g++-multilib libc6-dev-i386 lib32ncurses5-dev x11proto-core-dev libx11-dev lib32z-dev ccache libgl1-mesa-dev libxml2-utils xsltproc unzip m4
+  ```
+
+## Usage
+
+1. Run the full build command in the root directory of the source code:
+
+   ```
+   ./build.sh --product-name {product_name}
+   ```
+
+   The generated images are output to the out/ohos-arm-release/packages/phone/images/ directory.
+
+2. Options supported by the build command:
+
+   ```
+   --product-name   # (Required) Name of the product to build, for example, Hi3516DV300
+   --build-target   # (Optional) Build target; more than one can be specified
+   --gn-args        # (Optional) GN arguments; more than one can be specified
+   --ccache         # (Optional) Use ccache for the build; ccache must be installed locally
+   --sparse-image   # (Optional) Generate sparse images; img2simg must be installed locally. Raw images are generated by default
+   ```
+
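+   For example (the build target and GN argument values here are hypothetical), the options can be combined in one invocation:
+
+   ```
+   ./build.sh --product-name Hi3516DV300 --ccache --build-target foo --gn-args is_debug=false
+   ```
+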
+3. Add a module to an existing part.
+
+   **Supported template types:**
+
+   ```
+   ohos_executable
+   ohos_shared_library
+   ohos_static_library
+   ohos_source_set
+
+   # Prebuilt templates:
+   ohos_prebuilt_executable
+   ohos_prebuilt_shared_library
+   ohos_prebuilt_etc
+   ```
+
+   Configure BUILD.gn in the module directory and select the template that matches the module type.
+
+   **Examples:**
+
+   _Example for ohos\_shared\_library:_
+
+   ```
+   import("//build/ohos.gni")
+   ohos_shared_library("helloworld") {
+     sources = []
+     include_dirs = []
+     cflags = []
+     cflags_c = []
+     cflags_cc = []
+     ldflags = []
+     configs = []
+     deps = []  # Dependencies on modules within the same part
+
+     # Dependencies on modules in other parts,
+     # declared in the format "part_name:module_name".
+     # A module listed here must be declared in the inner_kits of the part it belongs to.
+     external_deps = [
+       "part_name:module_name",
+     ]
+
+     output_name = ""  # (Optional) Output name of the module
+     output_extension = ""  # (Optional) Extension of the output file name
+     module_install_dir = ""  # (Optional) Installation directory, specified from system/ or vendor/ onwards; defaults to /system/lib64 or /system/lib
+     relative_install_dir = ""  # (Optional) Installation directory relative to /system/lib64 or /system/lib; ignored when module_install_dir is set
+
+     part_name = ""  # (Required) Name of the part the module belongs to
+   }
+   ```
+
+   _Example for ohos\_executable:_
+
+   The attributes of the ohos\_executable template are basically the same as those of ohos\_shared\_library.
+
+   _Note: Executable modules (those defined with the ohos\_executable template) are not installed by default. To install one, set install\_enable = true._
+
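+   A minimal sketch (the module name, source file, and part name are illustrative):
+
+   ```
+   import("//build/ohos.gni")
+   ohos_executable("hello") {
+     sources = [ "hello.c" ]  # illustrative source file
+     install_enable = true  # install the executable into the image
+     part_name = "part_example"  # illustrative part name
+   }
+   ```
+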
+   _Example for ohos\_prebuilt\_etc:_
+
+   ```
+   import("//build/ohos.gni")
+   ohos_prebuilt_etc("etc_file") {
+     source = "file"
+     deps = []  # Dependencies on modules within the same part
+     module_install_dir = ""  # (Optional) Installation directory, specified from system/ or vendor/ onwards
+     relative_install_dir = ""  # (Optional) Installation directory relative to system/etc; ignored when module_install_dir is set
+     part_name = ""  # (Required) Name of the part the module belongs to
+   }
+   ```
+
+4. Configure a part.
+
+   Parts are configured in the ohos.build file in the root directory of the subsystem they belong to; one subsystem can define multiple parts.
+
+   A part declaration consists of four fields: module\_list, inner\_kits, system\_kits, and test\_list:
+
+   - module\_list: modules contained in the part
+   - inner\_kits: interfaces the part provides to other parts
+   - system\_kits: interfaces the part provides for building applications
+   - test\_list: test cases for the modules of the part
+
+   The entries in ohos.build are declarations only; the concrete module configuration still lives in the corresponding BUILD.gn files.
+
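+   A minimal sketch of an ohos.build file (the subsystem, part, and module paths are illustrative; the shape follows //build/common/ohos.build in this patch):
+
+   ```
+   {
+     "subsystem": "subsystem_example",
+     "parts": {
+       "part_example": {
+         "module_list": [
+           "//foo/bar:baz"
+         ]
+       }
+     }
+   }
+   ```
+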
+5. Build the ohos-sdk.
+
+   **Build command:**
+
+   ``` ./build.sh --product-name ohos-sdk --ccache ```
+
+   **Output:**
+
+   ``` out/ohos-arm64-release/packages/ohos-sdk/ ```
+
+   **ohos-sdk module configuration:**
+
+   ``` build/ohos/sdk/ohos_sdk_description_std.json ```
+
+## FAQs
+
+**How do I build a module and package it into the image?**
+
+- Set part\_name for the module to declare which part it belongs to; a module can belong to only one part.
+- The module must be listed in the module\_list of its part's configuration, or be reachable as a dependency of a module in module\_list.
+- The part must be added to the part list of the target product.
+
+## Repositories Involved
+
+Build subsystem
+
+**build**
+
diff --git a/dsoftbus/build/build_scripts/build.sh b/dsoftbus/build/build_scripts/build.sh
new file mode 100755
index 0000000000000000000000000000000000000000..3514b367a06ab5e17c6b2e2cd71585d3bdc27e64
--- /dev/null
+++ b/dsoftbus/build/build_scripts/build.sh
@@ -0,0 +1,145 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+echo "++++++++++++++++++++++++++++++++++++++++"
+date +%F' '%H:%M:%S
+echo $@
+
+function help() {
+  echo
+  echo "Usage:"
+  echo "  ./build.sh --product-name {product-name} [options]"
+  echo
+  echo "Examples:"
+  echo "  ./build.sh --product-name Hi3516DV300 --ccache"
+  echo
+  echo "options"
+  echo "  --ccache          use ccache, default: false"
+  echo "  --jobs N          run N jobs in parallel"
+  echo "  --build-target    build target name"
+  echo "  --gn-args         gn args"
+  echo "  --export-para     export env"
+  echo "  --help, -h        print help info"
+  echo
+}
+
+
+script_path=$(cd $(dirname $0);pwd)
+
+source_root_dir="${script_path}"
+while [[ ! -f "${source_root_dir}/.gn" ]]; do
+ source_root_dir="$(dirname "${source_root_dir}")"
+ if [[ "${source_root_dir}" == "/" ]]; then
+ echo "Cannot find source tree containing $(pwd)"
+ exit 1
+ fi
+done
+
+build_params=""
+
+while test $# -gt 0
+do
+ case "$1" in
+ --product-name)
+ shift
+ product_name="$1"
+ ;;
+ --help | -h)
+ help
+ exit 0
+ ;;
+ *)
+ build_params+=" $1"
+ ;;
+ esac
+ shift
+done
+
+
+if [[ "${source_root_dir}x" == "x" ]]; then
+ echo "Error: source_root_dir cannot be empty."
+ exit 1
+fi
+if [[ ! -d "${source_root_dir}" ]]; then
+ echo "Error: source_root_dir is incorrect."
+ exit 1
+fi
+if [[ "${product_name}x" == "x" ]]; then
+ echo -e "\033[31mError: the product name should be specified!\033[0m"
+ help
+ exit 1
+fi
+
+
+case $(uname -s) in
+ Darwin)
+ HOST_DIR="darwin-x86"
+ HOST_OS="mac"
+ ;;
+ Linux)
+ HOST_DIR="linux-x86"
+ HOST_OS="linux"
+ ;;
+ *)
+ echo "Unsupported host platform: $(uname -s)"
+ RET=1
+ exit $RET
+esac
+
+# set python3
+PYTHON3=${source_root_dir}/prebuilts/python/${HOST_DIR}/3.8.5/bin/python3
+
+if [[ ! -f "${PYTHON3}" ]]; then
+ echo -e "\033[33m Please execute the build/prebuilts_download.sh \033[0m"
+ exit 1
+fi
+
+${PYTHON3} ${source_root_dir}/build/scripts/tools_checker.py
+
+export OHOS_ROOT_PATH="${source_root_dir}"
+export PYTHON3="${PYTHON3}"
+export USE_OHOS_INIT=true
+export BUILD_IMAGE=true
+
+cd ${source_root_dir}
+
+# preloader
+${PYTHON3} ${source_root_dir}/build/loader/preloader/preloader.py \
+ --product-name ${product_name} \
+ --source-root-dir ${source_root_dir} \
+ --products-config-dir "productdefine/common/products" \
+ --preloader-output-root-dir "out/build_configs"
+
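+# build.prop, generated by the preloader above, defines the variables used
+# below: system_type, device_name, target_os and target_cpu.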
+source ${source_root_dir}/out/build_configs/${product_name}/preloader/build.prop
+
+# call build
+${source_root_dir}/build/build_scripts/build_${system_type}.sh \
+ --product-name ${product_name} \
+ --device-name ${device_name} \
+ --target-os ${target_os} \
+ --target-cpu ${target_cpu} \
+ ${build_params}
+
+if [[ "${PIPESTATUS[0]}" -ne 0 ]]; then
+ echo -e "\033[31m=====build ${product_name} error.\033[0m"
+ exit 1
+fi
+echo -e "\033[32m=====build ${product_name} successful.\033[0m"
+
+date +%F' '%H:%M:%S
+echo "++++++++++++++++++++++++++++++++++++++++"
diff --git a/dsoftbus/build/build_scripts/build_common.sh b/dsoftbus/build/build_scripts/build_common.sh
new file mode 100755
index 0000000000000000000000000000000000000000..17aa3125ce75704856bc94a32053409927d55ac0
--- /dev/null
+++ b/dsoftbus/build/build_scripts/build_common.sh
@@ -0,0 +1,53 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
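+# Assembles the build_ohos.sh command line from the parsed parameters. For a
+# standard build of e.g. Hi3516DV300 (illustrative values), the assembled
+# command looks like:
+#   build/build_scripts/build_ohos.sh product_name=Hi3516DV300 target_os=ohos \
+#     target_cpu=arm gn_args=is_standard_system=true build_target=images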
+function do_make_ohos() {
+ local build_cmd="build/build_scripts/build_ohos.sh"
+ build_cmd+=" product_name=${product_name} target_os=${target_os} target_cpu=${target_cpu}"
+ build_cmd+=" gn_args=is_standard_system=true"
+ if [[ "${build_target}x" != "x" ]]; then
+ for target_name in ${build_target[@]}; do
+ echo $target_name
+ build_cmd+=" build_target=$target_name"
+ done
+ elif [[ "${product_name}" == "ohos-sdk" ]]; then
+ build_cmd+=" build_target=build_ohos_sdk"
+ else
+ build_cmd+=" build_target=images"
+ fi
+
+ if [[ "${gn_args}x" != "x" ]]; then
+ for _args in ${gn_args[@]}; do
+ build_cmd+=" gn_args=$_args"
+ done
+ fi
+ if [[ "${ninja_args}x" != "x" ]]; then
+ for _args in ${ninja_args[@]}; do
+ build_cmd+=" ninja_args=$_args"
+ done
+ fi
+ if [[ "${PYCACHE_ENABLE}" == true ]]; then
+ build_cmd+=" pycache_enable=true"
+ fi
+ if [[ "${build_only_gn}" == true ]]; then
+ build_cmd+=" build_only_gn=true"
+ fi
+ if [[ "${sparse_image}" == true ]]; then
+ build_cmd+=" gn_args=sparse_image=true"
+ fi
+ echo "build_ohos_cmd: $build_cmd"
+ $build_cmd
+}
diff --git a/dsoftbus/build/build_scripts/build_ohos.sh b/dsoftbus/build/build_scripts/build_ohos.sh
new file mode 100755
index 0000000000000000000000000000000000000000..3c30d96b640b398890813a0a1248852073f45334
--- /dev/null
+++ b/dsoftbus/build/build_scripts/build_ohos.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+set -e
+echo "++++++++++++++++++++++++++++++++++++++++"
+date +%F' '%H:%M:%S
+echo $@
+
+BIN_PATH=$(cd $(dirname $0);pwd)
+BASE_HOME=$(dirname $(dirname ${BIN_PATH}))
+BUILD_SCRIPT_DIR=${BASE_HOME}/build/core/build_scripts
+
+main()
+{
+ source ${BUILD_SCRIPT_DIR}/pre_process.sh
+ pre_process "$@"
+
+ source ${BUILD_SCRIPT_DIR}/make_main.sh
+ do_make "$@"
+
+ source ${BUILD_SCRIPT_DIR}/post_process.sh
+ post_process "$@"
+ exit $RET
+}
+
+main "$@"
diff --git a/dsoftbus/build/build_scripts/build_standard.sh b/dsoftbus/build/build_scripts/build_standard.sh
new file mode 100755
index 0000000000000000000000000000000000000000..5bcb55e00f19c19041f47284ad17260600b56c75
--- /dev/null
+++ b/dsoftbus/build/build_scripts/build_standard.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+echo "build standard system..."
+echo "---------------------------------------"
+echo $@
+
+script_path=$(cd $(dirname $0);pwd)
+
+# parse params
+source ${script_path}/parse_params.sh
+
+system_type="standard"
+
+source ${script_path}/build_common.sh
+
+# build ohos
+do_make_ohos
+
diff --git a/dsoftbus/build/build_scripts/parse_params.sh b/dsoftbus/build/build_scripts/parse_params.sh
new file mode 100755
index 0000000000000000000000000000000000000000..46674cf87524b63ce62fd20e17358afdea352a92
--- /dev/null
+++ b/dsoftbus/build/build_scripts/parse_params.sh
@@ -0,0 +1,107 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+# set default value
+target_os=ohos
+target_cpu=arm64
+use_ccache=false
+sparse_image=false
+
+
+while test $# -gt 0
+do
+ case "$1" in
+ --product-name)
+ shift
+ product_name="$1"
+ ;;
+ --device-name)
+ shift
+ device_name="$1"
+ ;;
+ --target-cpu)
+ shift
+ target_cpu="$1"
+ ;;
+ --target-os)
+ shift
+ target_os="$1"
+ ;;
+ --build-target | -t)
+ shift
+ build_target="${build_target} $1"
+ ;;
+ --gn-args)
+ shift
+ gn_args="${gn_args} $1"
+ ;;
+ --ninja-args)
+ shift
+ ninja_args="${ninja_args} $1"
+ ;;
+ --ccache)
+ use_ccache=true
+ ;;
+ --sparse-image)
+ sparse_image=true
+ ;;
+ --jobs)
+ shift
+ jobs="$1"
+ ;;
+ --export-para)
+ shift
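+ # Split the "NAME:VALUE" argument at its last colon (the greedy .* in the
+ # first sed keeps everything before it in NAME), then export NAME=VALUE.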
+ PARAM1=$(echo "$1" | sed 's/\(.*\):\(.*\)/\1/')
+ PARAM2=$(echo "$1" | sed 's/.*://')
+ export $PARAM1=$PARAM2
+ ;;
+ --build-only-gn)
+ build_only_gn=true;;
+ -* | *)
+ echo "Unrecognized option: $1"
+ exit 1
+ ;;
+ esac
+ shift
+done
+
+if [[ "${product_name}x" == "x" ]]; then
+ echo "Error: the product name should be specified!"
+ exit 1
+fi
+
+if [[ "${use_ccache}" == true ]]; then
+ set +e
+ which ccache > /dev/null 2>&1
+ if [ $? -ne 0 ]; then
+ echo -e "\033[31mError: the ccache is not available, please install ccache.\033[0m"
+ exit 1
+ fi
+ source ${OHOS_ROOT_PATH}/build/core/build_scripts/set_ccache.sh
+ export CCACHE_EXEC=$(which ccache)
+ set_ccache
+ set -e
+fi
+
+if [[ "${sparse_image}" == true ]]; then
+ set +e
+ which img2simg > /dev/null 2>&1
+ if [[ $? -ne 0 ]]; then
+ echo -e "\033[31mError: the img2simg is not available, please install img2simg.\033[0m"
+ exit 1
+ fi
+ set -e
+fi
diff --git a/dsoftbus/build/bundle.json b/dsoftbus/build/bundle.json
new file mode 100644
index 0000000000000000000000000000000000000000..9b50c08cfee7a9cfae32e3ee09ac9a2a3618c044
--- /dev/null
+++ b/dsoftbus/build/bundle.json
@@ -0,0 +1,58 @@
+{
+ "name": "@ohos/build",
+ "version": "",
+ "description": "编译构建提供了一个在GN与ninja基础上的编译构建框架。支持以下功能:\r\n构建不同芯片平台的产品。如:Hi3518EV300平台的ipcamera产品,Hi3516DV300平台的ipcamera产品,Hi3861平台的wifi模组产品。构建HPM包管理配置生成的自定义产品。",
+ "homePage": "https://gitee.com/openharmony",
+ "license": "Apache V2",
+ "repository": "https://gitee.com/openharmony/build",
+ "domain": "os",
+ "language": "",
+ "publishAs": "code-segment",
+ "private": false,
+ "scripts": {},
+ "tags": [
+ "build"
+ ],
+ "keywords": [
+ "build"
+ ],
+ "envs": [],
+ "dirs": [],
+ "author": {
+ "name": "",
+ "email": "",
+ "url": ""
+ },
+ "contributors": [
+ {
+ "name": "",
+ "email": "",
+ "url": ""
+ }
+ ],
+ "segment": {
+ "destPath": "build"
+ },
+ "component": {
+ "name": "build",
+ "subsystem": "",
+ "syscap": [],
+ "features": [],
+ "adapted_system_type": [
+ "mini",
+ "small",
+ "standard"
+ ],
+ "rom": "",
+ "ram": "",
+ "deps": {
+ "components": [],
+ "third_party": []
+ },
+ "build": {
+ "sub_component": [],
+ "inner_kits": [],
+ "test": []
+ }
+ }
+}
\ No newline at end of file
diff --git a/dsoftbus/build/common/BUILD.gn b/dsoftbus/build/common/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..ddf1ad2cd182736a74182e209a09292afa1ab35d
--- /dev/null
+++ b/dsoftbus/build/common/BUILD.gn
@@ -0,0 +1,35 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/ohos/config.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/ohos.gni")
+
+group("common_packages") {
+ deps = []
+ deps += [
+ "//third_party/giflib:libgif",
+ "//third_party/libpng:libpng",
+ "//third_party/sqlite:sqlite",
+ "//third_party/zlib:libz",
+ ]
+
+ deps += [ "musl:musl_install" ]
+
+ if (is_asan) {
+ deps += [
+ "asan:asan.options",
+ "asan:libclang_rt.asan.so",
+ ]
+ }
+}
diff --git a/dsoftbus/build/common/asan/BUILD.gn b/dsoftbus/build/common/asan/BUILD.gn
new file mode 100644
index 0000000000000000000000000000000000000000..7526eadf27f890db695ae328e63ebf7c7eb25e7f
--- /dev/null
+++ b/dsoftbus/build/common/asan/BUILD.gn
@@ -0,0 +1,31 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/clang/clang.gni")
+import("//build/ohos.gni")
+
+ohos_prebuilt_shared_library("libclang_rt.asan.so") {
+ if (target_cpu == "arm") {
+ source = "${clang_base_path}/lib/clang/$clang_version/lib/arm-linux-ohosmusl/libclang_rt.asan.so"
+ } else if (target_cpu == "arm64") {
+ source = "${clang_base_path}/lib/clang/$clang_version/lib/aarch64-linux-ohosmusl/libclang_rt.asan.so"
+ } else {
+ source = ""
+ }
+ part_name = "common"
+}
+
+ohos_prebuilt_etc("asan.options") {
+ source = "asan.options"
+ part_name = "common"
+}
diff --git a/dsoftbus/build/common/asan/asan.options b/dsoftbus/build/common/asan/asan.options
new file mode 100644
index 0000000000000000000000000000000000000000..b991592dae06feee31d6ccda5cc2e2c62e9d99e3
--- /dev/null
+++ b/dsoftbus/build/common/asan/asan.options
@@ -0,0 +1,13 @@
+quarantine_size_mb=16
+max_redzone=2048
+thread_local_quarantine_size_kb=256
+allow_user_segv_handler=1
+detect_odr_violation=0
+alloc_dealloc_mismatch=0
+allocator_may_return_null=1
+detect_container_overflow=0
+abort_on_error=1
+halt_on_error=1
+allow_addr2line=1
+report_globals=0
+handle_abort=1
\ No newline at end of file
diff --git a/dsoftbus/build/common/musl/BUILD.gn b/dsoftbus/build/common/musl/BUILD.gn
new file mode 100644
index 0000000000000000000000000000000000000000..a63ac66c6bc060fca6a2ddbbde5d331233c55685
--- /dev/null
+++ b/dsoftbus/build/common/musl/BUILD.gn
@@ -0,0 +1,74 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/ohos/musl.gni")
+import("//build/ohos.gni")
+
+group("musl_install") {
+ deps = [
+ ":ld-musl-arm.so.1",
+ ":musl-libc.so",
+ ":musl-libcxx.so",
+ ":musl_ld_path_etc_cfg",
+ "updater:updater_ld-musl-arm.so.1",
+ "//third_party/musl:musl_libs",
+ ]
+}
+
+ohos_prebuilt_executable("ld-musl-arm.so.1") {
+ deps = [ "//third_party/musl:${musl_target_abi_name}_create_linker" ]
+ _musl_linker_so =
+ "${root_out_dir}/common/common/libc/ld-musl-${musl_arch}.so.1"
+ source = _musl_linker_so
+ install_enable = true
+ part_name = "common"
+}
+
+ohos_prebuilt_shared_library("musl-libc.so") {
+ deps = [ "//third_party/musl:${musl_target_abi_name}_libc_musl_shared" ]
+ source = "${musl_sysroot}/usr/lib/${musl_target_triple}/libc.so"
+ install_images = [
+ "system",
+ "updater",
+ ]
+ part_name = "common"
+}
+
+ohos_prebuilt_etc("musl_ld_path_etc_cfg") {
+ if (target_cpu == "arm") {
+ source = "ld-musl-arm.path"
+ } else {
+ source = "ld-musl-aarch64.path"
+ }
+ install_images = [
+ "system",
+ "updater",
+ ]
+ part_name = "common"
+}
+
+ohos_prebuilt_shared_library("musl-libcxx.so") {
+ if (target_cpu == "arm") {
+ source = "${clang_base_path}/lib/arm-linux-ohosmusl/c++/libc++.so"
+ } else if (target_cpu == "arm64") {
+ source = "${clang_base_path}/lib/aarch64-linux-ohosmusl/c++/libc++.so"
+ } else {
+ source = ""
+ }
+ install_images = [
+ "system",
+ "updater",
+ ]
+ part_name = "common"
+}
diff --git a/dsoftbus/build/common/musl/ld-musl-aarch64.path b/dsoftbus/build/common/musl/ld-musl-aarch64.path
new file mode 100644
index 0000000000000000000000000000000000000000..c7c664757253f5f9e9c6e8b5181f7b6cd822904f
--- /dev/null
+++ b/dsoftbus/build/common/musl/ld-musl-aarch64.path
@@ -0,0 +1 @@
+/system/lib64:/system/lib64/module:/system/lib64/module/data:/system/lib:/system/lib/module:/system/lib/module/data:/lib64:/lib:/usr/local/lib:/usr/lib
diff --git a/dsoftbus/build/common/musl/ld-musl-arm.path b/dsoftbus/build/common/musl/ld-musl-arm.path
new file mode 100644
index 0000000000000000000000000000000000000000..7dc21e513dc8373515542aad4d9ea4a702eeb1de
--- /dev/null
+++ b/dsoftbus/build/common/musl/ld-musl-arm.path
@@ -0,0 +1 @@
+/system/lib:/system/lib/module:/system/lib/module/data:/lib:/usr/local/lib:/usr/lib
diff --git a/dsoftbus/build/common/musl/updater/BUILD.gn b/dsoftbus/build/common/musl/updater/BUILD.gn
new file mode 100644
index 0000000000000000000000000000000000000000..b13fc4f28bc8e76aa19a4419f3c645bea9c532f9
--- /dev/null
+++ b/dsoftbus/build/common/musl/updater/BUILD.gn
@@ -0,0 +1,26 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/ohos/musl.gni")
+import("//build/ohos.gni")
+
+ohos_prebuilt_executable("updater_ld-musl-arm.so.1") {
+ deps = [ "//third_party/musl:${musl_target_abi_name}_create_linker" ]
+ _musl_linker_so =
+ "${root_out_dir}/common/common/libc/ld-musl-${musl_arch}.so.1"
+ source = _musl_linker_so
+ install_enable = true
+ module_install_dir = "system/bin"
+ install_images = [ "updater" ]
+ part_name = "common"
+}
diff --git a/dsoftbus/build/common/ohos.build b/dsoftbus/build/common/ohos.build
new file mode 100755
index 0000000000000000000000000000000000000000..5f8a2578c4a16ae6b2a6e972b69d5bc5f7bb430e
--- /dev/null
+++ b/dsoftbus/build/common/ohos.build
@@ -0,0 +1,10 @@
+{
+ "subsystem": "common",
+ "parts": {
+ "common": {
+ "module_list": [
+ "//build/common:common_packages"
+ ]
+ }
+ }
+}
diff --git a/dsoftbus/build/config/BUILD.gn b/dsoftbus/build/config/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..ddf421d08736034d6ddc8f01406214c8904a3a3f
--- /dev/null
+++ b/dsoftbus/build/config/BUILD.gn
@@ -0,0 +1,353 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/allocator.gni")
+import("//build/config/c++/c++.gni")
+import("//build/config/coverage/coverage.gni")
+import("//build/config/features.gni")
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+ # When set (the default) enables C++ iterator debugging in debug builds.
+ # Iterator debugging is always off in release builds (technically, this flag
+ # affects the "debug" config, which is always available but applied by
+ # default only in debug builds).
+ #
+ # Iterator debugging is generally useful for catching bugs. But it can
+ # introduce extra locking to check the state of an iterator against the state
+ # of the current object. For iterator- and thread-heavy code, this can
+ # significantly slow execution.
+ enable_iterator_debugging = true
+}
+
+# ==============================================
+# PLEASE DO NOT ADD MORE THINGS TO THIS LIST
+# ==============================================
+#
+# Legacy feature defines applied to all targets.
+#
+# These are applied to every single compile in the build and most of them are
+# only relevant to a few files. This bloats command lines and causes
+# unnecessary recompiles when flags are flipped.
+#
+# To pass defines to source code from the build, use the buildflag system which
+# will write headers containing the defines you need. This isolates the define
+# and means its definition can participate in the build graph, only recompiling
+# things when it actually changes.
+#
+# See //build/buildflag_header.gni for instructions on generating headers.
+#
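+# A typical use looks like this (the flag and target names are illustrative):
+#
+#   buildflag_header("my_feature_buildflags") {
+#     header = "my_feature_buildflags.h"
+#     flags = [ "ENABLE_MY_FEATURE=$enable_my_feature" ]
+#   }
+#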
+# This will also allow you to scope your build flag to a BUILD.gn file (or a
+# .gni file if you need it from more than one place) rather than making global
+# flags. See //build/config/BUILDCONFIG.gn for advice on where to define
+# build flags.
+config("feature_flags") {
+ # Don't use deprecated V8 APIs anywhere.
+ defines = [ "V8_DEPRECATION_WARNINGS" ]
+ if (use_udev) {
+ defines += [ "USE_UDEV" ]
+ }
+ if (is_win || is_linux) {
+ defines += [ "USE_AURA=1" ]
+ }
+ if (is_linux) {
+ defines += [
+ "USE_GLIB=1",
+ "USE_NSS_CERTS=1",
+ "USE_X11=1",
+ ]
+ }
+ if (use_allocator != "tcmalloc") {
+ defines += [ "NO_TCMALLOC" ]
+ }
+ if (is_asan || is_lsan || is_tsan || is_msan) {
+ defines += [
+ "MEMORY_TOOL_REPLACES_ALLOCATOR",
+ "MEMORY_SANITIZER_INITIAL_SIZE",
+ ]
+ }
+ if (is_asan) {
+ defines += [ "ADDRESS_SANITIZER" ]
+ }
+ if (is_lsan) {
+ defines += [ "LEAK_SANITIZER" ]
+ }
+ if (is_tsan) {
+ defines += [
+ "THREAD_SANITIZER",
+ "DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1",
+ "WTF_USE_DYNAMIC_ANNOTATIONS_NOIMPL=1",
+ ]
+ }
+ if (is_msan) {
+ defines += [ "MEMORY_SANITIZER" ]
+ }
+ if (is_ubsan || is_ubsan_null || is_ubsan_vptr || is_ubsan_security) {
+ defines += [ "UNDEFINED_SANITIZER" ]
+ }
+ if (use_clang_coverage) {
+ defines += [ "CLANG_COVERAGE" ]
+ }
+ if (is_official_build) {
+ defines += [ "OFFICIAL_BUILD" ]
+ }
+
+ # ==============================================
+ # PLEASE DO NOT ADD MORE THINGS TO THIS LIST
+ # ==============================================
+ #
+ # See the comment at the top.
+}
+
+# Debug/release ----------------------------------------------------------------
+
+config("debug") {
+ defines = [
+ "_DEBUG",
+ "DYNAMIC_ANNOTATIONS_ENABLED=1",
+ "WTF_USE_DYNAMIC_ANNOTATIONS=1",
+ ]
+
+ if (is_nacl) {
+ defines += [ "DYNAMIC_ANNOTATIONS_PREFIX=NACL_" ]
+ }
+
+ if (is_win) {
+ if (!enable_iterator_debugging) {
+ # Iterator debugging is enabled by default by the compiler on debug
+ # builds, and we have to tell it to turn it off.
+ defines += [ "_HAS_ITERATOR_DEBUGGING=0" ]
+ }
+ } else if (is_linux && current_cpu == "x64" && enable_iterator_debugging) {
+ # Enable libstdc++ debugging facilities to help catch problems early, see
+ # http://crbug.com/65151 .
+ defines += [ "_GLIBCXX_DEBUG=1" ]
+ }
+}
+
+config("release") {
+ defines = [ "NDEBUG" ]
+
+ # Sanitizers.
+ if (is_tsan) {
+ defines += [
+ "DYNAMIC_ANNOTATIONS_ENABLED=1",
+ "WTF_USE_DYNAMIC_ANNOTATIONS=1",
+ ]
+ } else {
+ defines += [ "NVALGRIND" ]
+ if (!is_nacl) {
+ # NaCl always enables dynamic annotations. Currently this value is set to
+ # 1 for all .nexes.
+ defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=0" ]
+ }
+ }
+
+ if (is_ios) {
+ defines += [ "NS_BLOCK_ASSERTIONS=1" ]
+ }
+}
+
+# Default libraries ------------------------------------------------------------
+
+# This config defines the default libraries applied to all targets.
+config("default_libs") {
+ if (is_win) {
+ libs = [
+ "advapi32.lib",
+ "comdlg32.lib",
+ "dbghelp.lib",
+ "dnsapi.lib",
+ "gdi32.lib",
+ "msimg32.lib",
+ "odbc32.lib",
+ "odbccp32.lib",
+ "oleaut32.lib",
+ "psapi.lib",
+ "shell32.lib",
+ "shlwapi.lib",
+ "user32.lib",
+ "usp10.lib",
+ "uuid.lib",
+ "version.lib",
+ "wininet.lib",
+ "winmm.lib",
+ "winspool.lib",
+ "ws2_32.lib",
+
+ # Please don't add more stuff here. We should actually be making this
+ # list smaller, since all common things should be covered. If you need
+ # some extra libraries, please just add a libs = [ "foo.lib" ] to your
+ # target that needs it.
+ ]
+ if (current_os == "winuwp") {
+ # These libraries are needed for Windows UWP (i.e. store apps).
+ libs += [
+ "dloadhelper.lib",
+ "WindowsApp.lib",
+ ]
+ } else {
+ # These libraries are not compatible with Windows UWP (i.e. store apps.)
+ libs += [
+ "delayimp.lib",
+ "kernel32.lib",
+ "ole32.lib",
+ ]
+ }
+ } else if (is_ohos) {
+ libs = [
+ "dl",
+ "m",
+ ]
+ } else if (is_mac) {
+ # Targets should choose to explicitly link frameworks they require. Since
+ # linking can have run-time side effects, nothing should be listed here.
+ libs = []
+ } else if (is_ios) {
+ # The libraries listed here will be specified for both the target and the
+ # host. Only the common ones should be listed here.
+ libs = [
+ "CoreFoundation.framework",
+ "CoreGraphics.framework",
+ "CoreText.framework",
+ "Foundation.framework",
+ ]
+ } else if (is_linux) {
+ libs = [
+ "dl",
+ "pthread",
+ "rt",
+ ]
+ }
+}
+
+# Only //build/config/BUILDCONFIG.gn should reference this.
+group("common_deps") {
+ public_deps = []
+
+ if (using_sanitizer) {
+ public_deps += [ "//build/config/sanitizers:deps" ]
+ }
+
+ if (use_custom_libcxx) {
+ if (is_double_framework) {
+ public_deps += [ "${asdk_libs_dir}/ndk/libcxx:libcxx" ]
+ } else {
+ public_deps += [ "//third_party/libcxx:libcxx" ]
+ }
+ }
+
+ if (use_afl) {
+ public_deps += [ "//third_party/afl" ]
+ }
+
+ if (is_ohos && use_order_profiling) {
+ public_deps += []
+ }
+
+ if (use_musl && current_toolchain != host_toolchain && !is_mingw) {
+ public_deps += [ "//third_party/musl:soft_shared_libs" ]
+ }
+}
+
+group("executable_deps") {
+ public_deps = [ ":common_deps" ]
+ if (export_libcxxabi_from_executables) {
+ if (!is_double_framework) {
+ public_deps += [ "//third_party/libcxxabi:libc++abi" ]
+ }
+ }
+}
+
+group("loadable_module_deps") {
+ public_deps = [ ":common_deps" ]
+}
+
+group("shared_library_deps") {
+ public_deps = [ ":common_deps" ]
+}
+
+group("static_library_deps") {
+ if (use_musl && current_toolchain != host_toolchain && !is_mingw) {
+ public_deps = [ "//third_party/musl:musl_headers" ]
+ }
+}
+
+group("source_set_deps") {
+ if (use_musl && current_toolchain != host_toolchain && !is_mingw) {
+ public_deps = [ "//third_party/musl:musl_headers" ]
+ }
+}
+
+# Executable configs -----------------------------------------------------------
+
+# Windows linker setup for EXEs and DLLs.
+if (is_win) {
+ _windows_linker_configs = [
+ "//build/config/win:sdk_link",
+ "//build/config/win:common_linker_setup",
+ ]
+}
+
+# This config defines the configs applied to all executables.
+config("executable_config") {
+ configs = []
+
+ if (is_win) {
+ configs += _windows_linker_configs
+
+ # Currently only turn on linker CFI for executables.
+ configs += [ "//build/config/win:cfi_linker" ]
+ } else if (is_mac) {
+ configs += [ "//build/config/mac:mac_dynamic_flags" ]
+ } else if (is_ios) {
+ configs += [
+ "//build/config/ios:ios_dynamic_flags",
+ "//build/config/ios:ios_executable_flags",
+ ]
+ } else if (is_linux || is_ohos || current_os == "aix") {
+ configs += [ "//build/config/gcc:executable_ldconfig" ]
+ if (is_ohos) {
+ configs += [ "//build/config/ohos:executable_config" ]
+ } else if (is_linux) {
+ configs += [ "//build/config/linux:executable_config" ]
+ }
+ }
+
+ # If we're using the prebuilt instrumented libraries with the sanitizers, we
+ # need to add ldflags to every binary to make sure they are picked up.
+ if (prebuilt_instrumented_libraries_available) {
+ configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ]
+ }
+ if (use_locally_built_instrumented_libraries) {
+ configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ]
+ }
+ configs += [ "//build/config/sanitizers:link_executable" ]
+}
+
+# Shared library configs -------------------------------------------------------
+
+# This config defines the configs applied to all shared libraries.
+config("shared_library_config") {
+ configs = []
+
+ if (is_win) {
+ configs += _windows_linker_configs
+ } else if (is_mac) {
+ configs += [ "//build/config/mac:mac_dynamic_flags" ]
+ } else if (is_ios) {
+ configs += [ "//build/config/ios:ios_dynamic_flags" ]
+ }
+
+ # If we're using the prebuilt instrumented libraries with the sanitizers, we
+ # need to add ldflags to every binary to make sure they are picked up.
+ if (prebuilt_instrumented_libraries_available) {
+ configs += [ "//third_party/instrumented_libraries:prebuilt_ldflags" ]
+ }
+ if (use_locally_built_instrumented_libraries) {
+ configs += [ "//third_party/instrumented_libraries:locally_built_ldflags" ]
+ }
+ configs += [ "//build/config/sanitizers:link_shared_library" ]
+}
diff --git a/dsoftbus/build/config/BUILDCONFIG.gn b/dsoftbus/build/config/BUILDCONFIG.gn
new file mode 100755
index 0000000000000000000000000000000000000000..98bb98b8a4a3bdee972189cc8b85942b33381018
--- /dev/null
+++ b/dsoftbus/build/config/BUILDCONFIG.gn
@@ -0,0 +1,522 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================================================
+# WHAT IS THIS FILE?
+# =============================================================================
+#
+# This is the master GN build configuration. This file is loaded after the
+# build args (args.gn) for the build directory and after the toplevel ".gn"
+# file (which points to this file as the build configuration).
+#
+# This file will be executed and the resulting context will be used to execute
+# every other file in the build. So variables declared here (that don't start
+# with an underscore) will be implicitly global.
+
+# =============================================================================
+# PLATFORM SELECTION
+# =============================================================================
+#
+# There are two main things to set: "os" and "cpu". The "toolchain" is the name
+# of the GN thing that encodes combinations of these things.
+#
+# Users typically only set the variables "target_os" and "target_cpu" in "gn
+# args", the rest are set up by our build and internal to GN.
+#
+# There are three different types of each of these things: The "host"
+# represents the computer doing the compile and never changes. The "target"
+# represents the main thing we're trying to build. The "current" represents
+# which configuration is currently being defined, which can be either the
+# host, the target, or something completely different (like nacl). GN will
+# run the same build file multiple times for the different required
+# configuration in the same build.
+#
+# This gives the following variables:
+# - host_os, host_cpu, host_toolchain
+# - target_os, target_cpu, default_toolchain
+# - current_os, current_cpu, current_toolchain.
+#
+# Note the default_toolchain isn't symmetrical (you would expect
+# target_toolchain). This is because the "default" toolchain is a GN built-in
+# concept, and "target" is something our build sets up that's symmetrical with
+# its GYP counterpart. Potentially the built-in default_toolchain variable
+# could be renamed in the future.
+#
+# When writing build files, to do something only for the host:
+# if (current_toolchain == host_toolchain) { ...
+
+if (target_os == "") {
+ target_os = host_os
+}
+
+if (target_cpu == "") {
+ if (target_os == "ohos") {
+ target_cpu = "arm"
+ } else {
+ target_cpu = host_cpu
+ }
+}
+
+if (current_cpu == "") {
+ current_cpu = target_cpu
+}
+if (current_os == "") {
+ current_os = target_os
+}
+
+# =============================================================================
+# BUILD FLAGS
+# =============================================================================
+#
+# This block lists input arguments to the build, along with their default
+# values.
+#
+# If a value is specified on the command line, it will overwrite the defaults
+# given in a declare_args block, otherwise the default will be used.
+#
+# YOU SHOULD ALMOST NEVER NEED TO ADD FLAGS TO THIS FILE. GN allows any file in
+# the build to declare build flags. If you need a flag for a single component,
+# you can just declare it in the corresponding BUILD.gn file.
+#
+# - If your feature is a single target, say //components/foo, you can put
+# a declare_args() block in //components/foo/BUILD.gn and use it there.
+# Nobody else in the build needs to see the flag.
+#
+# - Defines based on build variables should be implemented via the generated
+# build flag header system. See //build/buildflag_header.gni. You can put
+# the buildflag_header target in the same file as the build flag itself. You
+# should almost never set "defines" directly.
+#
+# - If your flag toggles a target on and off or toggles between different
+# versions of similar things, write a "group" target that forwards to the
+# right target (or no target) depending on the value of the build flag. This
+# group can be in the same BUILD.gn file as the build flag, and targets can
+# depend unconditionally on the group rather than duplicating flag checks
+# across many targets.
+#
+# - If a semi-random set of build files REALLY needs to know about a define and
+# the above pattern for isolating the build logic in a forwarding group
+# doesn't work, you can put the argument in a .gni file. This should be put
+# in the lowest level of the build that knows about this feature (which should
+# almost always be outside of the //build directory!).
+#
+# Other flag advice:
+#
+# - Use boolean values when possible. If you need a default value that expands
+# to some complex thing in the default case (like the location of the
+# compiler which would be computed by a script), use a default value of -1 or
+# the empty string. Outside of the declare_args block, conditionally expand
+# the default value as necessary.
+#
+# - Use a name like "use_foo" or "is_foo" (whatever is more appropriate for
+# your feature) rather than just "foo".
+#
+# - Write good comments directly above the declaration with no blank line.
+# These comments will appear as documentation in "gn args --list".
+#
+# - Don't call exec_script inside declare_args. This will execute the script
+# even if the value is overridden, which is wasteful. See first bullet.
+
+declare_args() {
+ # Set to enable the official build level of optimization. This has nothing
+ # to do with branding, but enables an additional level of optimization above
+ # release (!is_debug). This might be better expressed as a tri-state
+ # (debug, release, official) but for historical reasons there are two
+ # separate flags.
+ is_official_build = false
+
+ # Whether we're a traditional desktop unix.
+ is_desktop_linux = current_os == "linux"
+
+ # Set to true when compiling with the Clang compiler.
+ is_clang = current_os != "linux" ||
+ (current_cpu != "s390x" && current_cpu != "s390" &&
+ current_cpu != "ppc64" && current_cpu != "ppc" &&
+ current_cpu != "mips" && current_cpu != "mips64")
+
+ # Allows the path to a custom target toolchain to be injected as a single
+ # argument, and set as the default toolchain.
+ custom_toolchain = ""
+
+ # This should not normally be set as a build argument. It's here so that
+ # every toolchain can pass through the "global" value via toolchain_args().
+ host_toolchain = ""
+
+ # target platform
+ target_platform = "phone"
+
+ # Whether it is test.
+ is_test = false
+
+ # Whether it is double framework.
+ is_double_framework = false
+}
+
+declare_args() {
+ use_musl = true
+}
+
+asdk_libs_dir = "//prebuilts/asdk_libs"
+
+# Whether it is a phone product.
+is_phone_product = "${target_platform}" == "phone"
+
+# Whether it is a ivi product.
+is_ivi_product = "${target_platform}" == "ivi"
+
+is_wearable_product = "${target_platform}" == "wearable"
+
+is_intellitv_product = "${target_platform}" == "intellitv"
+
+is_emulator = false
+
+if (target_os == "ohos" && target_cpu == "x86_64") {
+ is_emulator = true
+}
+
+# different host platform tools directory.
+if (host_os == "linux") {
+ host_platform_dir = "linux-x86_64"
+} else if (host_os == "mac") {
+ host_platform_dir = "darwin-x86_64"
+} else {
+ assert(false, "Unsupported host_os: $host_os")
+}
+
+declare_args() {
+ # Debug build. Enabling official builds automatically sets is_debug to false.
+ is_debug = !is_official_build
+}
+
+declare_args() {
+ # Component build. Setting to true compiles targets declared as "components"
+ # as shared libraries loaded dynamically. This speeds up development time.
+ # When false, components will be linked statically.
+ #
+ # For more information see
+ # https://chromium.googlesource.com/chromium/src/+/master/docs/component_build.md
+ is_component_build = is_debug && current_os != "ios"
+}
+
+assert(!(is_debug && is_official_build), "Can't do official debug builds")
+
+# ==============================================================================
+# TOOLCHAIN SETUP
+# ==============================================================================
+#
+# Here we set the default toolchain, as well as the variable host_toolchain
+# which will identify the toolchain corresponding to the local system when
+# doing cross-compiles. When not cross-compiling, this will be the same as the
+# default toolchain.
+#
+# We do this before anything else to make sure we complain about any
+# unsupported os/cpu combinations as early as possible.
+
+if (host_toolchain == "") {
+ # This should only happen in the top-level context.
+ # In a specific toolchain context, the toolchain_args()
+ # block should have propagated a value down.
+
+ if (host_os == "linux") {
+ if (target_os != "linux") {
+ host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+ } else if (is_clang) {
+ host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+ } else {
+ host_toolchain = "//build/toolchain/linux:$host_cpu"
+ }
+ } else if (host_os == "mac") {
+ host_toolchain = "//build/toolchain/mac:clang_$host_cpu"
+ } else if (host_os == "win") {
+ if (target_cpu == "x86" || target_cpu == "x64") {
+ if (is_clang) {
+ host_toolchain = "//build/toolchain/win:win_clang_$target_cpu"
+ } else {
+ host_toolchain = "//build/toolchain/win:$target_cpu"
+ }
+ } else if (is_clang) {
+ host_toolchain = "//build/toolchain/win:win_clang_$host_cpu"
+ } else {
+ host_toolchain = "//build/toolchain/win:$host_cpu"
+ }
+ } else {
+ assert(false, "Unsupported host_os: $host_os")
+ }
+}
+
+_default_toolchain = ""
+
+if (target_os == "ohos") {
+ assert(host_os == "linux" || host_os == "mac",
+ "ohos builds are only supported on Linux and Mac hosts.")
+ _default_toolchain = "//build/toolchain/ohos:ohos_clang_$target_cpu"
+} else if (target_os == "linux") {
+ if (is_clang) {
+ _default_toolchain = "//build/toolchain/linux:clang_$target_cpu"
+ } else {
+ _default_toolchain = "//build/toolchain/linux:$target_cpu"
+ }
+} else {
+ assert(false, "Unsupported target_os: $target_os")
+}
+
+# If a custom toolchain has been set in the args, set it as default. Otherwise,
+# set the default toolchain for the platform (if any).
+if (custom_toolchain != "") {
+ set_default_toolchain(custom_toolchain)
+} else if (_default_toolchain != "") {
+ set_default_toolchain(_default_toolchain)
+}
+
+# =============================================================================
+# OS DEFINITIONS
+# =============================================================================
+#
+# We set these various is_FOO booleans for convenience in writing OS-based
+# conditions.
+#
+# - is_ohos, is_chromeos, is_ios, and is_win should be obvious.
+# - is_mac is set only for desktop Mac. It is not set on iOS.
+# - is_posix is true for mac and any Unix-like system (basically everything
+# except Windows).
+# - is_linux is true for desktop Linux and ChromeOS.
+#
+# Do not add more is_* variants here for random lesser-used Unix systems like
+# aix or one of the BSDs. If you need to check these, just check the
+# current_os value directly.
+
+if (current_os == "win" || current_os == "winuwp") {
+ is_aix = false
+ is_ohos = false
+ is_chromeos = false
+ is_ios = false
+ is_linux = false
+ is_mac = false
+ is_nacl = false
+ is_posix = false
+ is_win = true
+ is_mingw = false
+} else if (current_os == "mac") {
+ is_aix = false
+ is_ohos = false
+ is_chromeos = false
+ is_ios = false
+ is_linux = false
+ is_mac = true
+ is_nacl = false
+ is_posix = true
+ is_win = false
+ is_mingw = false
+} else if (current_os == "ohos") {
+ is_aix = false
+ is_ohos = true
+ is_chromeos = false
+ is_ios = false
+ is_linux = false
+ is_mac = false
+ is_nacl = false
+ is_posix = true
+ is_win = false
+ is_mingw = false
+} else if (current_os == "linux") {
+ is_aix = false
+ is_ohos = false
+ is_chromeos = false
+ is_ios = false
+ is_linux = true
+ is_mac = false
+ is_nacl = false
+ is_posix = true
+ is_win = false
+ is_mingw = false
+} else if (current_os == "mingw") {
+ is_aix = false
+ is_ohos = false
+ is_chromeos = false
+ is_ios = false
+ is_linux = false
+ is_mac = false
+ is_nacl = false
+ is_posix = true
+ is_win = false
+ is_mingw = true
+}
+
+# =============================================================================
+# SOURCES FILTERS
+# =============================================================================
+#
+# These patterns filter out platform-specific files when assigning to the
+# sources variable. The magic variable |sources_assignment_filter| is applied
+# to each assignment or appending to the sources variable and matches are
+# automatically removed.
+#
+# Note that the patterns are NOT regular expressions. Only "*" and "\b" (path
+# boundary = end of string or slash) are supported, and the entire string
+# must match the pattern (so you need "*.cc" to match all .cc files, for
+# example).
+
+# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
+# below.
+sources_assignment_filter = []
+
+if (!is_win && !is_mingw) {
+ sources_assignment_filter += [
+ "*_win.cc",
+ "*_win.h",
+ "*_win_unittest.cc",
+ "*\bwin/*",
+ "*.def",
+ ]
+}
+if (!is_mac) {
+ sources_assignment_filter += [
+ "*_mac.h",
+ "*_mac.cc",
+ "*_mac.mm",
+ "*_mac_unittest.h",
+ "*_mac_unittest.cc",
+ "*_mac_unittest.mm",
+ "*\bmac/*",
+ "*_cocoa.h",
+ "*_cocoa.cc",
+ "*_cocoa.mm",
+ "*_cocoa_unittest.h",
+ "*_cocoa_unittest.cc",
+ "*_cocoa_unittest.mm",
+ "*\bcocoa/*",
+ ]
+}
+if (!is_linux && !is_ohos) {
+ sources_assignment_filter += [
+ "*_linux.h",
+ "*_linux.cc",
+ "*_linux_unittest.h",
+ "*_linux_unittest.cc",
+ "*\blinux/*",
+ ]
+}
+if (!is_ohos) {
+ sources_assignment_filter += []
+}
+
+set_sources_assignment_filter(sources_assignment_filter)
+
+# =============================================================================
+# TARGET DEFAULTS
+# =============================================================================
+#
+# Set up the default configuration for every build target of the given type.
+# The values configured here will be automatically set on the scope of the
+# corresponding target. Target definitions can add or remove to the settings
+# here as needed.
+#
+# WHAT GOES HERE?
+#
+# Other than the main compiler and linker configs, the only reason for a config
+# to be in this list is if some targets need to explicitly override that config
+# by removing it. This is how targets opt-out of flags. If you don't have that
+# requirement and just need to add a config everywhere, reference it as a
+# sub-config of an existing one, most commonly the main "compiler" one.
+
+# Holds all configs used for running the compiler.
+default_compiler_configs = [
+ "//build/config:feature_flags",
+ "//build/config/compiler:afdo",
+ "//build/config/compiler:afdo_optimize_size",
+ "//build/config/compiler:compiler",
+ "//build/config/compiler:compiler_arm_fpu",
+ "//build/config/compiler:compiler_arm_thumb",
+ "//build/config/compiler:chromium_code",
+ "//build/config/compiler:default_include_dirs",
+ "//build/config/compiler:default_optimization",
+ "//build/config/compiler:default_stack_frames",
+ "//build/config/compiler:default_symbols",
+ "//build/config/compiler:export_dynamic",
+ "//build/config/compiler:no_exceptions",
+ "//build/config/compiler:no_rtti",
+ "//build/config/compiler:runtime_library",
+ "//build/config/compiler:thin_archive",
+ "//build/config/compiler:no_common",
+ "//build/config/coverage:default_coverage",
+ "//build/config/sanitizers:default_sanitizer_flags",
+]
+
+if (is_ohos) {
+ default_compiler_configs += [
+ "//build/config/ohos:default_orderfile_instrumentation",
+ "//build/config/gcc:symbol_visibility_inline_hidden",
+ ]
+}
+
+if (is_clang) {
+ default_compiler_configs += [
+ "//build/config/clang:find_bad_constructs",
+ "//build/config/clang:extra_warnings",
+ ]
+}
+
+# Debug/release-related defines.
+if (is_debug) {
+ default_compiler_configs += [ "//build/config:debug" ]
+} else {
+ default_compiler_configs += [ "//build/config:release" ]
+}
+
+# Static libraries and source sets use only the compiler ones.
+set_defaults("static_library") {
+ configs = default_compiler_configs
+}
+set_defaults("source_set") {
+ configs = default_compiler_configs
+}
+
+# Executable defaults.
+default_executable_configs = default_compiler_configs + [
+ "//build/config:default_libs",
+ "//build/config:executable_config",
+ ]
+set_defaults("executable") {
+ configs = default_executable_configs
+}
+
+# Shared library and loadable module defaults (also for components in component
+# mode).
+default_shared_library_configs = default_compiler_configs + [
+ "//build/config:default_libs",
+ "//build/config:shared_library_config",
+ ]
+
+set_defaults("shared_library") {
+ configs = default_shared_library_configs
+}
+set_defaults("loadable_module") {
+ configs = default_shared_library_configs
+}
+
+# Sets default dependencies for executable and shared_library targets.
+#
+# Variables
+# no_default_deps: If true, no standard dependencies will be added.
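+#
+# For example (hypothetical target), a target can opt out of these default
+# dependencies like this:
+#
+#   executable("standalone_tool") {
+#     sources = [ "main.cc" ]
+#     no_default_deps = true  # do not add //build/config:executable_deps
+#   }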
+target_type_list = [
+ "executable",
+ "loadable_module",
+ "shared_library",
+ "static_library",
+ "source_set",
+]
+
+foreach(_target_type, target_type_list) {
+ template(_target_type) {
+ target(_target_type, target_name) {
+ forward_variables_from(invoker, "*", [ "no_default_deps" ])
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (!defined(invoker.no_default_deps) || !invoker.no_default_deps) {
+ deps += [ "//build/config:${_target_type}_deps" ]
+ }
+ }
+ }
+}
diff --git a/dsoftbus/build/config/allocator.gni b/dsoftbus/build/config/allocator.gni
new file mode 100755
index 0000000000000000000000000000000000000000..d7ae92d0774d4950dc4403da1b433a0997020fba
--- /dev/null
+++ b/dsoftbus/build/config/allocator.gni
@@ -0,0 +1,58 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+# Temporarily disable tcmalloc on arm64 linux to get rid of compilation errors.
+if (is_ohos || is_mac || is_ios || is_asan || is_lsan || is_tsan || is_msan ||
+ is_win || (is_linux && target_cpu == "arm64")) {
+ _default_allocator = "none"
+} else {
+ _default_allocator = "tcmalloc"
+}
+
+# The debug CRT on Windows has some debug features that are incompatible with
+# the shim. NaCl in particular does seem to link some binaries statically
+# against the debug CRT with "is_nacl=false".
+if ((is_linux || is_ohos || is_mac ||
+ (is_win && !is_component_build && !is_debug)) && !is_asan && !is_lsan &&
+ !is_tsan && !is_msan) {
+ _default_use_allocator_shim = true
+} else {
+ _default_use_allocator_shim = false
+}
+
+declare_args() {
+ # Memory allocator to use. Set to "none" to use default allocator.
+ use_allocator = _default_allocator
+
+ # Causes all the allocations to be routed via allocator_shim.cc.
+ use_allocator_shim = _default_use_allocator_shim
+
+ # Partition alloc is included by default except iOS.
+ use_partition_alloc = !is_ios
+
+ # Use the new tcmalloc. It's relevant only when use_allocator == "tcmalloc".
+ use_new_tcmalloc = false
+}
+
+if (is_nacl) {
+ # Turn off the build flag for NaCL builds to minimize confusion, as NaCL
+ # doesn't support the heap shim.
+ use_allocator_shim = false
+}
+
+assert(use_allocator == "none" || use_allocator == "tcmalloc")
+
+assert(!is_win || use_allocator == "none", "Tcmalloc doesn't work on Windows.")
+assert(!is_mac || use_allocator == "none", "Tcmalloc doesn't work on macOS.")
+
+assert(
+ !use_allocator_shim || is_linux || is_ohos || is_win || is_mac,
+ "use_allocator_shim is supported only on Linux, Windows and macOS targets")
+
+if (is_win && use_allocator_shim) {
+ assert(!is_component_build,
+ "The allocator shim doesn't work for the component build on Windows.")
+}
diff --git a/dsoftbus/build/config/arm.gni b/dsoftbus/build/config/arm.gni
new file mode 100755
index 0000000000000000000000000000000000000000..eafbf1ff72c020f564bdac6378ae380857f170ef
--- /dev/null
+++ b/dsoftbus/build/config/arm.gni
@@ -0,0 +1,126 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/v8_target_cpu.gni")
+
+# These are primarily relevant in current_cpu == "arm" contexts, where
+# ARM code is being compiled. But they can also be relevant in other
+# contexts, when the code changes its behavior based on the CPU it is
+# generating code for.
+if (current_cpu == "arm" || v8_current_cpu == "arm") {
+ declare_args() {
+ # Version of the ARM processor when compiling on ARM. Ignored on non-ARM
+ # platforms.
+ arm_version = 7
+
+ # The ARM architecture. This will be a string like "armv6" or "armv7-a".
+ # An empty string means to use the default for the arm_version.
+ arm_arch = ""
+
+ # The ARM floating point hardware. This will be a string like "neon" or
+ # "vfpv3". An empty string means to use the default for the arm_version.
+ arm_fpu = ""
+
+ # The ARM floating point mode. This is either the string "hard", "soft", or
+ # "softfp". An empty string means to use the default one for the
+ # arm_version.
+ arm_float_abi = ""
+
+ # The ARM variant-specific tuning mode. This will be a string like "armv6"
+ # or "cortex-a15". An empty string means to use the default for the
+ # arm_version.
+ arm_tune = ""
+
+ # Whether to use the neon FPU instruction set or not.
+ arm_use_neon = ""
+
+ # Whether to enable optional NEON code paths.
+ arm_optionally_use_neon = false
+
+ # Thumb is a reduced instruction set available on some ARM processors that
+ # has increased code density.
+ arm_use_thumb = true
+ }
+
+ assert(arm_float_abi == "" || arm_float_abi == "hard" ||
+ arm_float_abi == "soft" || arm_float_abi == "softfp")
+
+ if (arm_use_neon == "") {
+ if (current_os == "linux" && target_cpu != v8_target_cpu) {
+ # Don't use neon on V8 simulator builds as a default.
+ arm_use_neon = false
+ } else {
+ arm_use_neon = true
+ }
+ }
+
+ if (arm_version == 6) {
+ if (arm_arch == "") {
+ arm_arch = "armv6"
+ }
+ if (arm_tune != "") {
+ arm_tune = ""
+ }
+ if (arm_float_abi == "") {
+ arm_float_abi = "softfp"
+ }
+ if (arm_fpu == "") {
+ arm_fpu = "vfp"
+ }
+ arm_use_thumb = false
+ arm_use_neon = false
+ } else if (arm_version == 7) {
+ if (arm_arch == "") {
+ arm_arch = "armv7-a"
+ }
+ if (arm_tune == "") {
+ arm_tune = "generic-armv7-a"
+ }
+
+ if (arm_float_abi == "") {
+ if (current_os == "ohos" || target_os == "ohos") {
+ arm_float_abi = "softfp"
+ } else if (current_os == "linux" && target_cpu != v8_target_cpu) {
+ arm_float_abi = "softfp"
+ } else {
+ arm_float_abi = "hard"
+ }
+ }
+
+ if (arm_fpu == "") {
+ if (arm_use_neon) {
+ arm_fpu = "neon"
+ } else {
+ arm_fpu = "vfpv3-d16"
+ }
+ }
+ } else if (arm_version == 8) {
+ if (arm_arch == "") {
+ arm_arch = "armv8-a"
+ }
+ if (arm_tune == "") {
+ arm_tune = "generic-armv8-a"
+ }
+
+ if (arm_float_abi == "") {
+ if (current_os == "ohos" || target_os == "ohos") {
+ arm_float_abi = "softfp"
+ } else {
+ arm_float_abi = "hard"
+ }
+ }
+
+ if (arm_fpu == "") {
+ if (arm_use_neon) {
+ arm_fpu = "neon"
+ } else {
+ arm_fpu = "vfpv3-d16"
+ }
+ }
+ }
+} else if (current_cpu == "arm64" || v8_current_cpu == "arm64") {
+ # arm64 supports only "hard".
+ arm_float_abi = "hard"
+ arm_use_neon = true
+}
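+
+# As a hypothetical example, an ARMv7 softfp build without NEON could be
+# requested from args.gn (the defaults above then fill in arm_arch and
+# arm_fpu):
+#
+# arm_version = 7
+# arm_float_abi = "softfp"
+# arm_use_neon = false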
diff --git a/dsoftbus/build/config/c++/c++.gni b/dsoftbus/build/config/c++/c++.gni
new file mode 100755
index 0000000000000000000000000000000000000000..d14def9f2541f8e6f3ba30edc5eab3c3c289d7f9
--- /dev/null
+++ b/dsoftbus/build/config/c++/c++.gni
@@ -0,0 +1,29 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+ # Use libc++ (//third_party/libcxx)
+ use_custom_libcxx = false
+
+ use_custom_libcxx_for_host = false
+}
+
+use_custom_libcxx =
+ use_custom_libcxx || (use_custom_libcxx_for_host && current_cpu == host_cpu)
+use_custom_libcxx = use_custom_libcxx && !is_nacl
+
+# libc++abi needs to be exported from executables to be picked up by shared
+# libraries on certain instrumented builds.
+export_libcxxabi_from_executables =
+ use_custom_libcxx && !is_component_build && (is_asan || is_ubsan_vptr)
+
+libcxx_prefix = "//third_party/libcxx"
+libcxxabi_prefix = "//third_party/libcxxabi"
+
+if (is_double_framework) {
+ libcxx_prefix = "${asdk_libs_dir}/ndk/libcxx"
+ libcxxabi_prefix = "${asdk_libs_dir}/ndk/libcxxabi"
+}
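+
+# For example (illustrative only), building both target and host against the
+# in-tree libc++ could be requested from args.gn:
+#
+# use_custom_libcxx = true
+# use_custom_libcxx_for_host = true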
diff --git a/dsoftbus/build/config/clang/BUILD.gn b/dsoftbus/build/config/clang/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..d4c10f6794d99e54a1729a6ea2e254e2fafb8c02
--- /dev/null
+++ b/dsoftbus/build/config/clang/BUILD.gn
@@ -0,0 +1,73 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("clang.gni")
+
+config("find_bad_constructs") {
+ if (clang_use_chrome_plugins) {
+ cflags = []
+
+ # On Windows, the plugin is built directly into clang, so there's
+ # no need to load it dynamically.
+ if (host_os == "mac") {
+ cflags += [
+ "-Xclang",
+ "-load",
+ "-Xclang",
+ rebase_path("${clang_base_path}/lib/libFindBadConstructs.dylib",
+ root_build_dir),
+ ]
+ } else if (host_os == "linux") {
+ cflags += [
+ "-Xclang",
+ "-load",
+ "-Xclang",
+ rebase_path("${clang_base_path}/lib/libFindBadConstructs.so",
+ root_build_dir),
+ ]
+ }
+
+ cflags += [
+ "-Xclang",
+ "-add-plugin",
+ "-Xclang",
+ "find-bad-constructs",
+ ]
+
+ cflags += [
+ "-Xclang",
+ "-plugin-arg-find-bad-constructs",
+ "-Xclang",
+ "enforce-in-thirdparty-webkit",
+ ]
+
+ cflags += [
+ "-Xclang",
+ "-plugin-arg-find-bad-constructs",
+ "-Xclang",
+ "check-enum-max-value",
+ ]
+
+ if (is_linux || is_ohos) {
+ cflags += [
+ "-Xclang",
+ "-plugin-arg-find-bad-constructs",
+ "-Xclang",
+ "check-ipc",
+ ]
+ }
+ }
+}
+
+# Enables some extra Clang-specific warnings. Some third-party code won't
+# compile with these, so such targets may want to remove this config; a
+# sketch follows the config below.
+config("extra_warnings") {
+ cflags = [
+ "-Wheader-hygiene",
+
+ # Warns when a const char[] is converted to bool.
+ "-Wstring-conversion",
+ "-Wtautological-overlap-compare",
+ ]
+}
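+
+# As a sketch, a third-party target that does not compile cleanly with these
+# warnings might remove the config (target name is hypothetical):
+#
+# source_set("vendored_parser") {
+#   configs -= [ "//build/config/clang:extra_warnings" ]
+# }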
diff --git a/dsoftbus/build/config/clang/clang.gni b/dsoftbus/build/config/clang/clang.gni
new file mode 100755
index 0000000000000000000000000000000000000000..5a124ffd3e9a04574f3548348fdfe66ec0210653
--- /dev/null
+++ b/dsoftbus/build/config/clang/clang.gni
@@ -0,0 +1,16 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+default_clang_base_path = "//prebuilts/clang/ohos/${host_platform_dir}/llvm"
+
+declare_args() {
+ # Indicates if the build should use the Chrome-specific plugins for enforcing
+ # coding guidelines, etc. Only used when compiling with Clang.
+ #clang_use_chrome_plugins = is_clang && !is_nacl && !use_xcode_clang
+ clang_use_chrome_plugins = false
+
+ clang_base_path = default_clang_base_path
+}
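+
+# As an example (the path is illustrative, not part of this tree), a custom
+# toolchain location could be supplied from args.gn:
+#
+# clang_base_path = "//prebuilts/clang/custom/llvm"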
diff --git a/dsoftbus/build/config/compiler/BUILD.gn b/dsoftbus/build/config/compiler/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..14c07bb42b376fbc24652fd4def15af4c669c980
--- /dev/null
+++ b/dsoftbus/build/config/compiler/BUILD.gn
@@ -0,0 +1,1775 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/coverage/coverage.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/toolchain.gni")
+
+import("//build/misc/overrides/build.gni")
+
+if (current_cpu == "arm" || current_cpu == "arm64") {
+ import("//build/config/arm.gni")
+}
+if (is_ohos) {
+ import("//build/config/ohos/config.gni")
+}
+if (is_mac) {
+ import("//build/config/mac/symbols.gni")
+}
+
+declare_args() {
+ # Default to warnings as errors for default workflow, where we catch
+ # warnings with known toolchains. Allow overriding this e.g. for Chromium
+ # builds on Linux that could use a different version of the compiler.
+ # With GCC, warnings in non-Chromium code are never treated as errors.
+ treat_warnings_as_errors = true
+
+ # Whether to use the binary binutils checked into third_party/binutils.
+ # These are not multi-arch so cannot be used except on x86 and x86-64 (the
+ # only two architectures that are currently checked in). Turn this off when
+ # you are using a custom toolchain and need to control -B in cflags.
+ linux_use_bundled_binutils =
+ linux_use_bundled_binutils_override && is_linux &&
+ (current_cpu == "x64" || current_cpu == "x86")
+ binutils_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+ root_build_dir)
+
+ # Compile in such a way as to make it possible for the profiler to unwind full
+ # stack frames. Setting this flag has a larger effect on the performance of
+ # the generated code than just enabling profiling, but it gives the profiler
+ # more information to analyze.
+ # Requires profiling to be set to true.
+ enable_full_stack_frames_for_profiling = false
+
+ # When we are going to use gold we need to find it.
+ # This is initialized below, after use_gold might have been overridden.
+ gold_path = false
+
+ if (is_win) {
+ # Whether the VS xtree header has been patched to disable warning 4702. If
+ # it has, then we don't need to disable 4702 (unreachable code warning).
+ # The patch is preapplied to the internal toolchain and hence all bots.
+ msvs_xtree_patched = false
+ }
+
+ # Enable fatal linker warnings. Building Chromium with certain versions
+ # of binutils can cause linker warnings.
+ # See: https://bugs.chromium.org/p/chromium/issues/detail?id=457359
+ fatal_linker_warnings = true
+
+ # Build with C++ RTTI enabled. Chromium builds without RTTI by default,
+ # but some sanitizers are known to require it, like CFI diagnostics
+ # and UBsan variants.
+ use_rtti = use_cfi_diag || is_ubsan_vptr || is_ubsan_security
+
+ # AFDO (Automatic Feedback Directed Optimizer) is a form of profile-guided
+ # optimization that GCC supports. It is used by ChromeOS in its official
+ # builds. To use it, set auto_profile_path to the path to a file containing
+ # the needed gcov profiling data.
+ auto_profile_path = ""
+
+ # Optimize symbol files to maximize the goma cache hit rate. Setting this
+ # to true may make it harder to debug binaries on Linux.
+ strip_absolute_paths_from_debug_symbols = false
+
+ # Allow projects that wish to stay on C++11 to override Chromium's default.
+ use_cxx11 = false
+
+ # Path to an AFDO profile to use while building with clang, if any. Empty
+ # implies none.
+ clang_sample_profile_path = ""
+
+ # Some configurations have default sample profiles. If this is true and
+ # clang_sample_profile_path is empty, we'll fall back to the default.
+ #
+ # We currently only have default profiles for Chromium in-tree, so we disable
+ # this by default for all downstream projects, since these profiles are likely
+ # nonsensical for said projects.
+ clang_use_default_sample_profile =
+ is_official_build && (is_ohos || is_desktop_linux)
+
+ # Turn this on to have the compiler output extra timing information.
+ compiler_timing = false
+
+ # Set to true to pass --no-rosegment to lld. This is a workaround
+ # for a known issue in Valgrind,
+ # https://bugs.kde.org/show_bug.cgi?id=384727
+ ro_segment_workaround_for_valgrind = false
+
+ # Turn this on to use ghash feature of lld for faster debug link on Windows.
+ # http://blog.llvm.org/2018/01/improving-link-time-on-windows-with.html
+ use_ghash = false
+
+ # Whether to enable ThinLTO optimizations. Turning ThinLTO optimizations on
+ # can substantially increase link time and binary size, but they generally
+ # also make binaries a fair bit faster.
+ thin_lto_enable_optimizations = is_chromeos
+
+ # By default only the binaries in official builds get build IDs; set this to
+ # true to force build IDs in local (unofficial) builds as well.
+ force_local_build_id = true
+}
+
+declare_args() {
+ use_cxx11_on_ohos = use_cxx11
+}
+
+declare_args() {
+ # Set to true to use icf, Identical Code Folding.
+ #
+ # icf=all is broken in older golds, see
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=17704
+ # See also https://crbug.com/663886
+ # `linux_use_bundled_binutils` is to avoid breaking Linux distros which may
+ # still have a buggy gold.
+ # chromeos binutils has been patched with the fix, so always use icf there.
+ # The bug only affects x86 and x64, so we can still use ICF when targeting
+ # other architectures.
+ #
+ # lld doesn't have the bug.
+ use_icf =
+ is_posix && !using_sanitizer && !use_clang_coverage &&
+ !(is_ohos && use_order_profiling) &&
+ (use_lld ||
+ (use_gold && ((!is_ohos && linux_use_bundled_binutils) || is_chromeos ||
+ !(current_cpu == "x86" || current_cpu == "x64"))))
+}
+
+# Apply the default logic for these values if they were not set explicitly.
+if (gold_path == false) {
+ if (use_gold) {
+ gold_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+ root_build_dir)
+ } else {
+ gold_path = ""
+ }
+}
+
+if (use_debug_fission == "default") {
+ use_debug_fission = is_debug && !is_ohos && !is_win &&
+ (use_gold || use_lld) && cc_wrapper == ""
+}
+
+# default_include_dirs ---------------------------------------------------------
+#
+# This is a separate config so that third_party code (which would not use the
+# source root and might have conflicting versions of some headers) can remove
+# this and specify their own include paths.
+config("default_include_dirs") {
+ include_dirs = [
+ "//",
+ root_gen_dir,
+ ]
+}
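+
+# For example, a vendored target that ships its own headers might replace
+# this config (target name and path are hypothetical):
+#
+# source_set("vendored_lib") {
+#   configs -= [ "//build/config/compiler:default_include_dirs" ]
+#   include_dirs = [ "include" ]
+# }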
+
+# compiler ---------------------------------------------------------------------
+#
+# Base compiler configuration.
+#
+# See also "runtime_library" below for related stuff and a discussion about
+# where stuff should go. Put warning related stuff in the "warnings" config.
+
+config("compiler") {
+ asmflags = []
+ cflags = []
+ cflags_c = []
+ cflags_cc = []
+ cflags_objc = []
+ cflags_objcc = []
+ ldflags = []
+ defines = []
+ configs = []
+ inputs = []
+
+ # System-specific flags. If your compiler flags apply to one of the
+ # categories here, add it to the associated file to keep this shared config
+ # smaller.
+ if (is_win) {
+ configs += [ "//build/config/win:compiler" ]
+ } else if (is_ohos) {
+ configs += [ "//build/config/ohos:compiler" ]
+ } else if (is_linux) {
+ configs += [ "//build/config/linux:compiler" ]
+ } else if (is_nacl) {
+ configs += [ "//build/config/nacl:compiler" ]
+ } else if (is_mac) {
+ configs += [ "//build/config/mac:compiler" ]
+ } else if (is_ios) {
+ configs += [ "//build/config/ios:compiler" ]
+ } else if (current_os == "aix") {
+ configs += [ "//build/config/aix:compiler" ]
+ } else if (is_mingw) {
+ configs += [ "//build/config/mingw:compiler" ]
+ }
+
+ configs += [
+ # See the definitions below.
+ ":compiler_cpu_abi",
+ ":compiler_codegen",
+ ]
+
+ # In general, Windows is totally different, but all the other builds share
+ # some common GCC configuration.
+ if (!is_win) {
+ # Common POSIX compiler flags setup.
+ # --------------------------------
+ cflags += [ "-fno-strict-aliasing" ] # See http://crbug.com/32204
+
+ # Stack protection.
+ if (is_mac) {
+ # The strong variant of the stack protector significantly increases
+ # binary size, so only enable it in debug mode.
+ if (is_debug) {
+ cflags += [ "-fstack-protector-strong" ]
+ } else {
+ cflags += [ "-fstack-protector" ]
+ }
+ } else if (is_posix && !is_chromeos && !is_nacl) {
+ cflags += [ "--param=ssp-buffer-size=4" ]
+
+ # The x86 toolchain currently has problems with stack-protector.
+ if (is_ohos && current_cpu == "x86") {
+ cflags += [ "-fno-stack-protector" ]
+ } else if (is_mingw) {
+ cflags += [ "-fno-stack-protector" ]
+ } else if (current_os != "aix") {
+ # Not available on aix.
+ cflags += [ "-fstack-protector-strong" ]
+ }
+ }
+
+ # Linker warnings.
+ if (fatal_linker_warnings && !(is_chromeos && current_cpu == "arm") &&
+ !(is_ohos && use_order_profiling) && !is_mac && !is_ios &&
+ current_os != "aix" && !is_mingw) {
+ ldflags += [ "-Wl,--fatal-warnings" ]
+ }
+ } else {
+ cflags += [
+ # Assume UTF-8 by default to avoid code page dependencies.
+ "/utf-8",
+ ]
+ if (is_clang) {
+ # Don't look for includes in %INCLUDE%.
+ cflags += [ "/X" ]
+ }
+ }
+
+ # Eliminate build metadata (__DATE__, __TIME__ and __TIMESTAMP__) for
+ # deterministic builds. See https://crbug.com/314403
+ if (!is_official_build) {
+ if (is_win && !is_clang) {
+ cflags += [
+ "/wd4117", # Trying to define or undefine a predefined macro.
+ "/D__DATE__=",
+ "/D__TIME__=",
+ "/D__TIMESTAMP__=",
+ ]
+ } else {
+ cflags += [
+ "-Wno-builtin-macro-redefined",
+ "-D__DATE__=",
+ "-D__TIME__=",
+ "-D__TIMESTAMP__=",
+ ]
+ }
+ }
+
+ if (is_clang && is_debug) {
+ # Allow comparing the address of references and 'this' against 0
+ # in debug builds. Technically, these can never be null in
+ # well-defined C/C++ and Clang can optimize such checks away in
+ # release builds, but they may be used in asserts in debug builds.
+ cflags_cc += [
+ "-Wno-undefined-bool-conversion",
+ "-Wno-tautological-undefined-compare",
+ ]
+ }
+
+ if (is_posix && !(is_mac || is_ios)) {
+ if (enable_profiling) {
+ if (!is_debug) {
+ cflags += [ "-g" ]
+
+ if (enable_full_stack_frames_for_profiling) {
+ cflags += [
+ "-fno-inline",
+ "-fno-optimize-sibling-calls",
+ ]
+ }
+ }
+ }
+
+ if (!is_mingw && (is_official_build || force_local_build_id)) {
+ # Explicitly pass --build-id to ld. Compilers used to always pass this
+ # implicitly but don't any more (in particular clang when built without
+ # ENABLE_LINKER_BUILD_ID=ON). The crash infrastructure does need a build
+ # id, so explicitly enable it in official builds. It's not needed in
+ # unofficial builds and computing it does slow down the link, so go with
+ # faster links in unofficial builds.
+ ldflags += [ "-Wl,--build-id=md5" ]
+ }
+
+ if (!is_ohos) {
+ defines += [
+ "_FILE_OFFSET_BITS=64",
+ "_LARGEFILE_SOURCE",
+ "_LARGEFILE64_SOURCE",
+ ]
+ }
+
+ if (!is_nacl) {
+ cflags += [ "-funwind-tables" ]
+ }
+ }
+
+ if (is_linux || is_ohos) {
+ if (use_pic) {
+ cflags += [ "-fPIC" ]
+ ldflags += [ "-fPIC" ]
+ }
+
+ if (!is_clang) {
+ # Use pipes for communicating between sub-processes. Faster.
+ # (This flag doesn't do anything with Clang.)
+ cflags += [ "-pipe" ]
+ }
+
+ ldflags += [
+ "-Wl,-z,noexecstack",
+ "-Wl,-z,now",
+ "-Wl,-z,relro",
+ ]
+
+ # Compiler instrumentation can introduce dependencies in DSOs to symbols in
+ # the executable they are loaded into, so they are unresolved at link-time.
+ if (!using_sanitizer && !is_safestack) {
+ ldflags += [
+ "-Wl,-z,defs",
+ "-Wl,--as-needed",
+ ]
+ }
+ }
+
+ if ((is_posix && use_lld) || (target_os == "chromeos" && is_ohos)) {
+ # NOTE: Some Chrome OS builds globally disable LLD, but they also build some
+ # targets against ohos toolchains which should use LLD. Therefore we
+ # explicitly select LLD in these cases.
+ ldflags += [ "-fuse-ld=lld" ]
+ if (current_cpu == "arm64") {
+ # Reduce the page size from 65536 in order to reduce binary size slightly
+ # by shrinking the alignment gap between segments. This also causes all
+ # segments to be mapped adjacently, which breakpad relies on.
+ ldflags += [ "-Wl,-z,max-page-size=4096" ]
+ }
+ } else if (use_gold) {
+ ldflags += [ "-fuse-ld=gold" ]
+ if (!is_ohos) {
+ # On ohos, this isn't needed. gcc in the NDK knows to look next to
+ # it with -fuse-ld=gold, and clang gets a --gcc-toolchain flag passed
+ # above.
+ ldflags += [ "-B$gold_path" ]
+
+ if (linux_use_bundled_binutils) {
+ ldflags += [
+ # Experimentation found that using four linking threads
+ # saved ~20% of link time.
+ # Only apply this to the target linker, since the host
+ # linker might not be gold, but isn't used much anyway.
+ "-Wl,--threads",
+ "-Wl,--thread-count=4",
+ ]
+ }
+ }
+ } else if (linux_use_bundled_binutils) {
+ # Gold is the default linker for the bundled binutils so we explicitly
+ # enable the bfd linker when use_gold is not set.
+ ldflags += [ "-fuse-ld=bfd" ]
+ }
+
+ if (use_icf) {
+ ldflags += [ "-Wl,--icf=all" ]
+ }
+
+ if (linux_use_bundled_binutils) {
+ cflags += [ "-B$binutils_path" ]
+ }
+
+ if (is_linux) {
+ cflags += [ "-pthread" ]
+ # Do not use the -pthread ldflag here since it becomes a no-op
+ # when using -nodefaultlibs, which would cause an unused argument
+ # error. "-lpthread" is added in //build/config:default_libs.
+ }
+
+ # Clang-specific compiler flags setup.
+ # ------------------------------------
+ if (is_clang) {
+ cflags += [ "-fcolor-diagnostics" ]
+
+ # Enable -fmerge-all-constants. This used to be the default in clang
+ # for over a decade. It makes clang non-conforming, but is fairly safe
+ # in practice and saves some binary size. We might want to consider
+ # disabling this (https://bugs.llvm.org/show_bug.cgi?id=18538#c13),
+ # but for now it looks like our build might rely on it
+ # (https://crbug.com/829795).
+ cflags += [ "-fmerge-all-constants" ]
+ }
+
+ if (use_lld) {
+ if (is_win) {
+ # On Windows, we call the linker directly, instead of calling it through
+ # the driver.
+ ldflags += [ "--color-diagnostics" ]
+ } else {
+ ldflags += [ "-Wl,--color-diagnostics" ]
+ }
+ }
+
+ if (is_clang && !is_nacl && !use_xcode_clang) {
+ cflags += [
+ "-Xclang",
+ "-mllvm",
+ "-Xclang",
+ "-instcombine-lower-dbg-declare=0",
+ ]
+ }
+
+ # Print absolute paths in diagnostics. There is no precedent for doing this
+ # on Linux/Mac (GCC doesn't support it), but MSVC does this with /FC and
+ # Windows developers rely on it (crbug.com/636109) so only do this on Windows.
+ if (msvc_use_absolute_paths && is_clang && is_win) {
+ cflags += [ "-fdiagnostics-absolute-paths" ]
+ }
+
+ # Makes builds independent of absolute file paths.
+ # Currently disabled for nacl since its toolchain lacks this flag (too old).
+ if (symbol_level != 0 && is_clang && !is_nacl && !is_mac && !is_ios &&
+ strip_absolute_paths_from_debug_symbols) {
+ # If a debug option is given, clang includes $cwd in the debug info by
+ # default. With this flag, building the same files with the same compile
+ # flags produces reproducible obj files even across different build
+ # directories like "out/feature_a" and "out/feature_b".
+ # Other paths are already given as relative, so there is no need to
+ # normalize them.
+ cflags += [
+ "-Xclang",
+ "-fdebug-compilation-dir",
+ "-Xclang",
+ ".",
+ ]
+
+ if (is_win && use_lld) {
+ if (symbol_level == 2 || (is_clang && using_sanitizer)) {
+ # Absolutize source file path for PDB. Pass the real build directory
+ # if the pdb contains source-level debug information.
+ ldflags += [ "/PDBSourcePath:" + rebase_path(root_build_dir) ]
+ } else {
+ # On Windows, (non-sanitizer) symbol_level 1 builds don't contain
+ # debug information in obj files; the linker just creates enough
+ # debug info at link time to produce symbolized stacks (without line
+ # numbers). In that case, there's no downside in using a fake fixed
+ # base directory for paths in the pdb. This makes the pdb output
+ # fully deterministic and independent of the build directory.
+ assert(symbol_level == 1 && !(is_clang && using_sanitizer))
+ ldflags += [ "/PDBSourcePath:o:\fake\prefix" ]
+ }
+ }
+ }
+
+ # Tells the compiler not to use absolute paths when passing the default
+ # paths to the tools it invokes. We don't want this because we don't
+ # really need it and it can mess up the goma cache entries.
+ if (is_clang && !is_nacl) {
+ cflags += [ "-no-canonical-prefixes" ]
+ }
+
+ # C/C++ language standard setup (C++11 or C++17).
+ # -----------------------------------------------
+ if (is_linux || is_ohos || (is_nacl && is_clang) || current_os == "aix") {
+ if (target_os == "ohos") {
+ cxx11_override = use_cxx11_on_ohos
+ } else {
+ cxx11_override = use_cxx11
+ }
+
+ if (is_clang) {
+ standard_prefix = "c"
+
+ # Since we build with -std=c* and not -std=gnu*, _GNU_SOURCE will not be
+ # defined by the compiler. However, lots of code relies on the
+ # non-standard features that _GNU_SOURCE enables, so define it manually.
+ defines += [ "_GNU_SOURCE" ]
+
+ if (is_nacl) {
+ # Undefine __STRICT_ANSI__ to get non-standard features which would
+ # otherwise not be enabled by NaCl's sysroots.
+ cflags += [ "-U__STRICT_ANSI__" ]
+ }
+ } else {
+ # Gcc does not support ##__VA_ARGS__ when in standards-conforming mode,
+ # but we use this feature in several places in Chromium.
+ standard_prefix = "gnu"
+ }
+
+ # cflags_c += [ "-std=${standard_prefix}11" ]
+ if (cxx11_override) {
+ # Override Chromium's default for projects that wish to stay on C++11.
+ cflags_cc += [ "-std=${standard_prefix}++11" ]
+ } else {
+ cflags_cc += [ "-std=${standard_prefix}++17" ]
+ }
+ } else if (!is_win && !is_nacl && !is_mingw) {
+ if (target_os == "ohos") {
+ cxx11_override = use_cxx11_on_ohos
+ } else {
+ cxx11_override = use_cxx11
+ }
+
+ if (cxx11_override) {
+ cflags_cc += [ "-std=c++11" ]
+ } else {
+ cflags_cc += [ "-std=c++17" ]
+ }
+ }
+
+ if (is_mac) {
+ # The system libc++ on Mac doesn't have aligned allocation in C++17.
+ defines += [ "_LIBCPP_HAS_NO_ALIGNED_ALLOCATION" ]
+ cflags_cc += [ "-stdlib=libc++" ]
+ ldflags += [ "-stdlib=libc++" ]
+ }
+
+ # Add flags for link-time optimization. These flags enable
+ # optimizations/transformations that require whole-program visibility at link
+ # time, so they need to be applied to all translation units, and we may end up
+ # with miscompiles if only part of the program is compiled with LTO flags. For
+ # that reason, we cannot allow targets to enable or disable these flags, for
+ # example by disabling the optimize configuration.
+ if (!is_debug && use_thin_lto &&
+ (current_toolchain == default_toolchain ||
+ (is_ohos && defined(ohos_secondary_abi_toolchain) &&
+ current_toolchain == ohos_secondary_abi_toolchain))) {
+ assert(use_lld || target_os == "chromeos",
+ "gold plugin only supported with ChromeOS")
+
+ cflags += [ "-flto=thin" ]
+
+ if (thin_lto_enable_optimizations) {
+ lto_opt_level = 2
+ } else {
+ lto_opt_level = 0
+ }
+
+ if (is_win) {
+ # This is a straight translation of the non-Windows flags below,
+ # except we do not use the ThinLTO cache, which leaks temporary
+ # files on Windows (https://crbug.com/871962).
+ ldflags += [
+ "/opt:lldlto=" + lto_opt_level,
+ "/opt:lldltojobs=8",
+ ]
+ } else {
+ ldflags += [ "-flto=thin" ]
+
+ # Limit the parallelism to avoid too aggressive competition between
+ # linker jobs. This is still suboptimal to a potential dynamic
+ # resource allocation scheme, but should be good enough.
+ if (use_lld) {
+ # Limit the size of the ThinLTO cache to the lesser of 10% of available disk
+ # space, 10GB and 100000 files.
+ cache_policy =
+ "cache_size=10%:cache_size_bytes=10g:cache_size_files=100000"
+ ldflags += [
+ "-Wl,--thinlto-jobs=8",
+ "-Wl,--thinlto-cache-dir=" +
+ rebase_path("$root_out_dir/thinlto-cache", root_build_dir),
+ "-Wl,--thinlto-cache-policy,$cache_policy",
+ ]
+ } else {
+ ldflags += [ "-Wl,-plugin-opt,jobs=8" ]
+ }
+
+ if (use_lld) {
+ ldflags += [ "-Wl,--lto-O" + lto_opt_level ]
+ if (thin_lto_enable_optimizations) {
+ if (is_ohos) {
+ ldflags += [
+ "-Wl,-mllvm",
+ "-Wl,-import-instr-limit=5",
+ ]
+ }
+ }
+ } else {
+ not_needed([ "lto_opt_level" ])
+ }
+ }
+
+ if (!is_ohos) {
+ cflags += [ "-fwhole-program-vtables" ]
+ if (!is_win) {
+ ldflags += [ "-fwhole-program-vtables" ]
+ }
+ }
+
+ # Work-around for http://openradar.appspot.com/20356002
+ if (is_mac) {
+ ldflags += [ "-Wl,-all_load" ]
+ }
+
+ # This flag causes LTO to create an .ARM.attributes section with the correct
+ # architecture. This is necessary because LLD will refuse to link a program
+ # unless the architecture revision in .ARM.attributes is sufficiently new.
+ if (current_cpu == "arm") {
+ ldflags += [ "-march=$arm_arch" ]
+ }
+ }
+
+ if (compiler_timing) {
+ if (is_clang) {
+ if (is_win) {
+ cflags += [ "-Xclang" ]
+ }
+ cflags += [ "-ftime-report" ]
+ } else if (is_win) {
+ cflags += [
+ # "Documented" here:
+ # http://aras-p.info/blog/2017/10/23/Best-unknown-MSVC-flag-d2cgsummary/
+ "/d2cgsummary",
+ ]
+ }
+ }
+
+ # Pass flag to LLD to work around issue in Valgrind related to
+ # location of debug symbols.
+ if (use_lld && ro_segment_workaround_for_valgrind) {
+ ldflags += [ "-Wl,--no-rosegment" ]
+ }
+
+ # This flag enforces that member pointer base types are complete. It helps
+ # prevent us from running into problems in the Microsoft C++ ABI (see
+ # https://crbug.com/847724).
+ if (is_clang && !is_nacl && target_os != "chromeos" && !use_xcode_clang &&
+ (is_win || use_custom_libcxx)) {
+ cflags += [ "-fcomplete-member-pointers" ]
+ }
+
+ # Pass the same C/C++ flags to the objective C/C++ compiler.
+ cflags_objc += cflags_c
+ cflags_objcc += cflags_cc
+
+ # Assign any flags set for the C compiler to asmflags so that they are sent
+ # to the assembler. The Windows assembler takes different types of flags
+ # so only do so for posix platforms.
+ if (is_posix) {
+ asmflags += cflags
+ asmflags += cflags_c
+ }
+}
+
+# This provides the basic options to select the target CPU and ABI.
+# It is factored out of "compiler" so that special cases can use this
+# without using everything that "compiler" brings in. Options that
+# tweak code generation for a particular CPU do not belong here!
+# See "compiler_codegen", below.
+config("compiler_cpu_abi") {
+ cflags = []
+ ldflags = []
+ defines = []
+
+ if (is_posix && !(is_mac || is_ios)) {
+ # CPU architecture. We may or may not be doing a cross compile now, so for
+ # simplicity we always explicitly set the architecture.
+ if (current_cpu == "x64") {
+ cflags += [
+ "-m64",
+ "-march=x86-64",
+ ]
+ ldflags += [ "-m64" ]
+ } else if (current_cpu == "x86") {
+ cflags += [ "-m32" ]
+ ldflags += [ "-m32" ]
+ if (!is_nacl) {
+ cflags += [
+ "-msse2",
+ "-mfpmath=sse",
+ "-mmmx",
+ ]
+ }
+ } else if (current_cpu == "arm") {
+ if (is_clang && !is_ohos && !is_nacl) {
+ cflags += [ "--target=arm-linux-gnueabihf" ]
+ ldflags += [ "--target=arm-linux-gnueabihf" ]
+ }
+ if (!is_nacl) {
+ cflags += [
+ "-march=$arm_arch",
+ "-mfloat-abi=$arm_float_abi",
+ ]
+ }
+ if (arm_tune != "") {
+ cflags += [ "-mtune=$arm_tune" ]
+ }
+ } else if (current_cpu == "arm64") {
+ if (is_clang && !is_ohos && !is_nacl) {
+ cflags += [ "--target=aarch64-linux-gnu" ]
+ ldflags += [ "--target=aarch64-linux-gnu" ]
+ }
+ if (is_clang && is_ohos) {
+ ldflags += [ "-Wl,--hash-style=gnu" ]
+ }
+ }
+ }
+
+ asmflags = cflags
+}
+
+# This provides options to tweak code generation that are necessary
+# for particular Chromium code or for working around particular
+# compiler bugs (or the combination of the two).
+config("compiler_codegen") {
+ configs = []
+ cflags = []
+
+ if (is_nacl) {
+ configs += [ "//build/config/nacl:compiler_codegen" ]
+ } else if (is_posix && !is_mac && !is_ios) {
+ if (current_cpu == "x86") {
+ if (is_clang) {
+ cflags += [ "-momit-leaf-frame-pointer" ]
+ }
+ } else if (current_cpu == "arm") {
+ if (is_ohos && !is_clang) {
+ # Clang doesn't support these flags.
+ cflags += [
+ "-fno-tree-sra",
+ "-fno-caller-saves",
+ ]
+ }
+ }
+ }
+
+ asmflags = cflags
+}
+
+config("compiler_arm_fpu") {
+ if (current_cpu == "arm" && !is_ios && !is_nacl) {
+ cflags = [ "-mfpu=$arm_fpu" ]
+ asmflags = cflags
+ }
+}
+
+config("compiler_arm_thumb") {
+ if (current_cpu == "arm" && arm_use_thumb && is_posix &&
+ !(is_mac || is_ios || is_nacl)) {
+ cflags = [ "-mthumb" ]
+ if (is_ohos && !is_clang) {
+ # Clang doesn't support this option.
+ cflags += [ "-mthumb-interwork" ]
+ }
+ }
+}
+
+config("compiler_arm") {
+ if (current_cpu == "arm" && is_chromeos) {
+ # arm is normally the default mode for clang, but on chromeos a wrapper
+ # is used to pass -mthumb, and therefore changes the default.
+ cflags = [ "-marm" ]
+ }
+}
+
+# runtime_library -------------------------------------------------------------
+#
+# Sets the runtime library and associated options.
+#
+# How do you determine what should go in here vs. "compiler" above? Consider if
+# a target might choose to use a different runtime library (ignore for a moment
+# if this is possible or reasonable on your system). If such a target would want
+# to change or remove your option, put it in the runtime_library config. If a
+# target wants the option regardless, put it in the compiler config.
+
+config("runtime_library") {
+ defines = []
+ configs = []
+
+ # The order of this config is important: it must appear before
+ # ohos:runtime_library.
+ if (is_posix) {
+ configs += [ "//build/config/posix:runtime_library" ]
+ }
+
+ # System-specific flags. If your compiler flags apply to one of the
+ # categories here, add it to the associated file to keep this shared config
+ # smaller.
+ if (is_win) {
+ configs += [ "//build/config/win:runtime_library" ]
+ } else if (is_linux) {
+ configs += [ "//build/config/linux:runtime_library" ]
+ } else if (is_ios) {
+ configs += [ "//build/config/ios:runtime_library" ]
+ } else if (is_mac) {
+ configs += [ "//build/config/mac:runtime_library" ]
+ } else if (is_ohos) {
+ configs += [ "//build/config/ohos:runtime_library" ]
+ }
+
+ if (is_component_build) {
+ defines += [ "COMPONENT_BUILD" ]
+ }
+}
+
+# default_warnings ------------------------------------------------------------
+#
+# Collects all warning flags that are used by default. This is used as a
+# subconfig of both chromium_code and no_chromium_code. This way these
+# flags are guaranteed to appear on the compile command line after -Wall.
+config("default_warnings") {
+ cflags = []
+ cflags_cc = []
+ ldflags = []
+
+ if ((is_mac || is_ios) && !is_nacl) {
+ # When compiling Objective-C, warns if a method is used whose
+ # availability is newer than the deployment target.
+ cflags += [ "-Wunguarded-availability" ]
+ }
+
+ if (is_ios) {
+ # When compiling Objective-C, warns if a selector named via @selector has
+ # not been defined in any visible interface.
+ cflags += [ "-Wundeclared-selector" ]
+ }
+
+ # Suppress warnings about ABI changes on ARM (Clang doesn't give this
+ # warning).
+ if (current_cpu == "arm" && !is_clang) {
+ cflags += [ "-Wno-psabi" ]
+ }
+
+ if (!is_clang) {
+ cflags_cc += [
+ # See comment for -Wno-c++11-narrowing.
+ "-Wno-narrowing",
+ ]
+
+ # -Wunused-local-typedefs is broken in gcc,
+ # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=63872
+ cflags += [ "-Wno-unused-local-typedefs" ]
+
+ # Don't warn about "maybe" uninitialized. Clang doesn't include this
+ # in -Wall but gcc does, and it gives false positives.
+ cflags += [ "-Wno-maybe-uninitialized" ]
+ cflags += [ "-Wno-deprecated-declarations" ]
+
+ # GCC assumes 'this' is never nullptr and optimizes away code
+ # like "if (this == nullptr) ...": [1]. However, some Chromium
+ # code relies on these types of null pointer checks [2], so
+ # disable this optimization.
+ # [1] https://gcc.gnu.org/gcc-6/porting_to.html#this-cannot-be-null
+ # [2] https://crbug.com/784492#c13
+ cflags += [ "-fno-delete-null-pointer-checks" ]
+
+ # -Wcomment gives too many false positives when a backslash-ended
+ # comment line is followed by a new line of comments.
+ # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=61638
+ cflags += [ "-Wno-comments" ]
+ }
+
+ # Common Clang and GCC warning setup.
+ if (!is_win || is_clang) {
+ cflags += [
+ # Disables.
+ "-Wno-missing-field-initializers", # "struct foo f = {0};"
+ "-Wno-unused-parameter", # Unused function parameters.
+ ]
+ }
+
+ if (is_mingw) {
+ cflags += [
+ "-Wno-error=c99-designator",
+ "-Wno-error=anon-enum-enum-conversion",
+ "-Wno-error=implicit-fallthrough",
+ "-Wno-error=sizeof-array-div",
+ "-Wno-error=reorder-init-list",
+ "-Wno-error=range-loop-construct",
+ "-Wno-error=deprecated-copy",
+ "-Wno-error=implicit-int-float-conversion",
+ "-Wno-error=inconsistent-dllimport",
+ "-Wno-error=unknown-warning-option",
+ "-Wno-error=abstract-final-class",
+ "-Wno-error=sign-compare",
+ ]
+ }
+
+ if (is_clang) {
+ cflags += [
+ # This warns on using ints as initializers for floats in
+ # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|),
+ # which happens in several places in chrome code. Not sure if
+ # this is worth fixing.
+ "-Wno-c++11-narrowing",
+ "-Wno-unneeded-internal-declaration",
+ ]
+ if (use_musl) {
+ cflags += [
+ "-Wno-error=c99-designator",
+ "-Wno-error=anon-enum-enum-conversion",
+ "-Wno-error=implicit-fallthrough",
+ "-Wno-error=sizeof-array-div",
+ "-Wno-error=reorder-init-list",
+ "-Wno-error=range-loop-construct",
+ "-Wno-error=deprecated-copy",
+ "-Wno-error=implicit-int-float-conversion",
+ "-Wno-error=inconsistent-dllimport",
+ "-Wno-error=unknown-warning-option",
+ "-Wno-error=abstract-final-class",
+ "-Wno-error=sign-compare",
+ "-Wno-error=int-in-bool-context",
+ "-Wno-error=xor-used-as-pow",
+ "-Wno-error=return-stack-address",
+ "-Wno-error=dangling-gsl",
+ ]
+ }
+
+ # use_xcode_clang only refers to the iOS toolchain; host binaries always
+ # use Chromium's clang.
+ if (!is_nacl) {
+ cflags += [ "-Wno-undefined-var-template" ]
+ if (current_toolchain == host_toolchain || !use_xcode_clang ||
+ xcode_version_int >= 930) {
+ cflags += [
+ "-Wno-nonportable-include-path",
+ "-Wno-user-defined-warnings",
+ "-Wno-unused-lambda-capture",
+ ]
+ }
+ if (current_toolchain == host_toolchain || !use_xcode_clang ||
+ xcode_version_int >= 1000) {
+ cflags += [ "-Wno-null-pointer-arithmetic" ]
+ }
+ if (current_toolchain == host_toolchain || !use_xcode_clang) {
+ # Flags NaCl (Clang 3.7) and Xcode 9.2 (Clang clang-900.0.39.2) do not
+ # recognize.
+ cflags += [ "-Wno-enum-compare-switch" ]
+ }
+ }
+ }
+}
+
+# chromium_code ---------------------------------------------------------------
+#
+# Toggles between higher and lower warnings for code that is (or isn't)
+# part of Chromium.
+
+config("chromium_code") {
+ if (is_win) {
+ cflags = [ "/W4" ] # Warning level 4.
+
+ if (is_clang) {
+ # Opt in to additional [[nodiscard]] on standard library methods.
+ defines = [ "_HAS_NODISCARD" ]
+ }
+ } else {
+ cflags = [ "-Wall" ]
+ if (treat_warnings_as_errors) {
+ cflags += [ "-Werror" ]
+
+ # The compiler driver can sometimes (rarely) emit warnings before calling
+ # the actual linker. Make sure these warnings are treated as errors as
+ # well.
+ ldflags = [ "-Werror" ]
+ }
+ if (is_clang) {
+ # Enable extra warnings for chromium_code when we control the compiler.
+ cflags += [ "-Wextra" ]
+ }
+
+ # In Chromium code, we define __STDC_foo_MACROS in order to get the
+ # C99 macros on Mac and Linux.
+ defines = [
+ "__STDC_CONSTANT_MACROS",
+ "__STDC_FORMAT_MACROS",
+ ]
+
+ if (!is_debug && !using_sanitizer &&
+ (!is_linux || !is_clang || is_official_build)) {
+ # _FORTIFY_SOURCE isn't really supported by Clang now, see
+ # http://llvm.org/bugs/show_bug.cgi?id=16821.
+ # It seems to work fine with Ubuntu 12 headers though, so use it in
+ # official builds.
+ #
+ # Non-chromium code is not guaranteed to compile cleanly with
+ # _FORTIFY_SOURCE. Also, fortified build may fail when optimizations are
+ # disabled, so only do that for Release build.
+ defines += [ "_FORTIFY_SOURCE=2" ]
+ }
+
+ if (is_mac) {
+ cflags_objc = [ "-Wobjc-missing-property-synthesis" ]
+ cflags_objcc = [ "-Wobjc-missing-property-synthesis" ]
+ }
+ }
+
+ if (is_clang) {
+ cflags += [
+ # Warn on missing break statements at the end of switch cases.
+ # For intentional fallthrough, use FALLTHROUGH; from
+ # base/compiler_specific.h
+ "-Wimplicit-fallthrough",
+
+ # Thread safety analysis. See base/thread_annotations.h and
+ # https://clang.llvm.org/docs/ThreadSafetyAnalysis.html
+ "-Wthread-safety",
+ ]
+ }
+
+ configs = [ ":default_warnings" ]
+}
+
+config("no_chromium_code") {
+ cflags = []
+ cflags_cc = []
+ defines = []
+
+ if (is_win) {
+ cflags += [
+ "/W3", # Warning level 3.
+ "/wd4800", # Disable warning when forcing value to bool.
+ "/wd4267", # size_t to int.
+ "/wd4996", # Deprecated function warning.
+ ]
+ defines += [
+ "_CRT_NONSTDC_NO_WARNINGS",
+ "_CRT_NONSTDC_NO_DEPRECATE",
+ ]
+ } else {
+ # GCC may emit unsuppressible warnings, so don't add -Werror for
+ # non-Chromium code. crbug.com/589724
+ if (treat_warnings_as_errors && is_clang) {
+ cflags += [ "-Werror" ]
+ ldflags = [ "-Werror" ]
+ }
+ if (is_clang && !is_nacl) {
+ cflags += [ "-Wall" ]
+ }
+ }
+
+ if (is_clang) {
+ cflags += [
+ # Lots of third-party libraries have unused variables. Instead of
+ # suppressing them individually, we just blanket suppress them here.
+ "-Wno-unused-variable",
+ ]
+ }
+
+ configs = [ ":default_warnings" ]
+}
+
+# noshadowing -----------------------------------------------------------------
+#
+# Allows turning -Wshadow on.
+
+config("noshadowing") {
+ # This flag has to be disabled for nacl because the nacl compiler is too
+ # strict about shadowing.
+ if (is_clang && !is_nacl) {
+ cflags = [ "-Wshadow" ]
+ }
+}
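+
+# For example, a target that wants shadowing diagnostics can opt in
+# (hypothetical target name):
+#
+# source_set("strict_code") {
+#   configs += [ "//build/config/compiler:noshadowing" ]
+# }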
+
+# rtti ------------------------------------------------------------------------
+#
+# Allows turning Run-Time Type Identification on or off.
+
+config("rtti") {
+ if (is_win) {
+ cflags_cc = [ "/GR" ]
+ } else {
+ cflags_cc = [ "-frtti" ]
+ }
+}
+
+config("no_rtti") {
+ # Some sanitizer configs may require RTTI to be left enabled globally
+ if (!use_rtti) {
+ if (is_win) {
+ cflags_cc = [ "/GR-" ]
+ } else {
+ cflags_cc = [ "-fno-rtti" ]
+ cflags_objcc = cflags_cc
+ }
+ }
+}
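+
+# A target that needs RTTI (e.g. for dynamic_cast or typeid) can swap the
+# configs. This is only a sketch with a hypothetical target, and assumes the
+# default configs include no_rtti:
+#
+# source_set("uses_dynamic_cast") {
+#   configs -= [ "//build/config/compiler:no_rtti" ]
+#   configs += [ "//build/config/compiler:rtti" ]
+# }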
+
+# export_dynamic ---------------------------------------------------------------
+#
+# Ensures all exported symbols are added to the dynamic symbol table. This is
+# necessary to expose Chrome's custom operator new() and operator delete() (and
+# other memory-related symbols) to libraries. Otherwise, they might
+# (de)allocate memory on a different heap, which would spell trouble if pointers
+# to heap-allocated memory are passed over shared library boundaries.
+config("export_dynamic") {
+ if (is_desktop_linux || export_libcxxabi_from_executables) {
+ ldflags = [ "-rdynamic" ]
+ }
+}
+
+# thin_archive -----------------------------------------------------------------
+#
+# Enables thin archives on posix. Regular archives directly include the object
+# files used to generate them. Thin archives merely reference the object files.
+# This makes building them faster since it requires less disk IO, but is
+# inappropriate if you wish to redistribute your static library.
+# This config is added to the global config, so thin archives should already be
+# enabled. If you want to make a distributable static library, you need to do 2
+# things:
+# 1. Set complete_static_lib so that all dependencies of the library make it
+# into the library. See `gn help complete_static_lib` for details.
+# 2. Remove the thin_archive config, so that the .a file actually contains all
+# .o files, instead of just references to .o files in the build directory.
+# A sketch of such a target follows this config.
+config("thin_archive") {
+ # Mac and iOS use the mac-specific "libtool" command, not ar, which doesn't
+ # have a "thin archive" mode (it does accept -T, but it means truncating
+ # archive names to 16 characters, which is not what we want).
+ if (is_posix && !is_nacl && !is_mac && !is_ios) {
+ arflags = [ "-T" ]
+ }
+}
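+
+# Sketch of a distributable static library following the two steps above
+# (target name and source file are hypothetical):
+#
+# static_library("dist_lib") {
+#   complete_static_lib = true
+#   configs -= [ "//build/config/compiler:thin_archive" ]
+#   sources = [ "lib.cc" ]
+# }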
+
+# exceptions -------------------------------------------------------------------
+#
+# Allows turning Exceptions on or off.
+
+config("exceptions") {
+ if (is_win) {
+ # Enables exceptions in the STL.
+ if (!use_custom_libcxx) {
+ defines = [ "_HAS_EXCEPTIONS=1" ]
+ }
+ cflags_cc = [ "/EHsc" ]
+ } else {
+ cflags_cc = [ "-fexceptions" ]
+ cflags_objcc = cflags_cc
+ }
+}
+
+config("no_exceptions") {
+ if (is_win) {
+ # Disables exceptions in the STL.
+ # libc++ uses the __has_feature macro to control whether to use exceptions,
+ # so defining this macro is unnecessary. Defining _HAS_EXCEPTIONS to 0 also
+ # breaks libc++ because it depends on MSVC headers that only provide certain
+ # declarations if _HAS_EXCEPTIONS is 1. Those MSVC headers do not use
+ # exceptions, despite being conditional on _HAS_EXCEPTIONS.
+ if (!use_custom_libcxx) {
+ defines = [ "_HAS_EXCEPTIONS=0" ]
+ }
+ } else {
+ cflags_cc = [ "-fno-exceptions" ]
+ cflags_objcc = cflags_cc
+ }
+}
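+
+# Illustrative only: a target that must build with exceptions enabled might
+# swap the default config (hypothetical target name, assuming no_exceptions
+# is among the default configs):
+#
+# source_set("uses_exceptions") {
+#   configs -= [ "//build/config/compiler:no_exceptions" ]
+#   configs += [ "//build/config/compiler:exceptions" ]
+# }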
+
+# Warnings ---------------------------------------------------------------------
+
+# This will generate warnings when using Clang if code generates exit-time
+# destructors, which will slow down closing the program.
+config("wexit_time_destructors") {
+ if (is_clang) {
+ cflags = [ "-Wexit-time-destructors" ]
+ }
+}
+
+# On Windows compiling on x64, VC will issue a warning when converting
+# size_t to int because it will truncate the value. Our code should not have
+# these warnings and one should use a static_cast or a checked_cast for the
+# conversion depending on the case. However, a lot of code still needs to be
+# fixed. Apply this config to such targets to disable the warning.
+#
+# Note that this can be applied regardless of platform and architecture to
+# clean up the call sites. This will only apply the flag when necessary.
+#
+# This config is just an alias to no_shorten_64_warnings and will
+# suppress a superset of warning 4267 and any 64-bit -> 32-bit implicit
+# conversions. Having both for a time means not having to go through and
+# update all references to no_size_t_to_int_warning throughout the codebase
+# atomically.
+#
+# Any new warning suppressions should use the no_shorten_64_warnings
+# config below and not this.
+config("no_size_t_to_int_warning") {
+ configs = [ ":no_shorten_64_warnings" ]
+}
+
+# As part of re-enabling -Wconversion (see issue 588506) some code
+# will continue to generate warnings.
+# The first warning to be enabled will be -Wshorten-64-to-32.
+#
+# Code that currently generates warnings for this can include this
+# config to disable them.
+config("no_shorten_64_warnings") {
+ if (current_cpu == "x64" || current_cpu == "arm64") {
+ if (is_clang) {
+ cflags = [ "-Wno-shorten-64-to-32" ]
+ } else {
+ if (is_win) {
+ # MSVC does not have an explicit warning equivalent to
+ # -Wshorten-64-to-32 but 4267 warns for size_t -> int
+ # on 64-bit builds, so is the closest.
+ cflags = [ "/wd4267" ]
+ }
+ }
+ }
+}
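+
+# For example, code that still truncates 64-bit values can apply this config
+# per target (hypothetical target name):
+#
+# source_set("legacy_math") {
+#   configs += [ "//build/config/compiler:no_shorten_64_warnings" ]
+# }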
+
+# Some code presumes that pointers to structures/objects are compatible
+# regardless of whether what they point to is already known to be valid.
+# gcc 4.9 and earlier had no way of suppressing this warning without
+# suppressing the rest of them. Here we centralize the identification of
+# the gcc 4.9 toolchains.
+config("no_incompatible_pointer_warnings") {
+ cflags = []
+ if (is_clang) {
+ cflags += [ "-Wno-incompatible-pointer-types" ]
+ } else if (is_chromeos && current_cpu == "arm") {
+ cflags += [ "-w" ]
+ }
+}
+
+# Optimization -----------------------------------------------------------------
+#
+# The BUILDCONFIG file sets the "default_optimization" config on targets by
+# default. It will be equivalent to either "optimize" (release) or
+# "no_optimize" (debug) optimization configs.
+#
+# You can override the optimization level on a per-target basis by removing the
+# default config and then adding the named one you want:
+#
+# configs -= [ "//build/config/compiler:default_optimization" ]
+# configs += [ "//build/config/compiler:optimize_max" ]
+
+# Shared settings for both "optimize" and "optimize_max" configs.
+# IMPORTANT: On Windows "/O1" and "/O2" must go before the common flags.
+if (is_win) {
+ common_optimize_on_cflags = [
+ "/Ob2", # Both explicit and auto inlining.
+ "/Oy-", # Disable omitting frame pointers, must be after /O2.
+ "/Zc:inline", # Remove unreferenced COMDAT (faster links).
+ ]
+ if (!is_asan) {
+ common_optimize_on_cflags += [
+ # Put data in separate COMDATs. This allows the linker
+ # to put bit-identical constants at the same address even if
+ # they're unrelated constants, which saves binary size.
+ # This optimization can't be used when ASan is enabled because
+ # it is not compatible with the ASan ODR checker.
+ "/Gw",
+ ]
+ }
+ common_optimize_on_ldflags = []
+
+ # /OPT:ICF is not desirable in Debug builds, since code-folding can result in
+ # misleading symbols in stack traces. It is also incompatible with
+ # incremental linking, which we enable for both Debug and component builds.
+ if (!is_debug && !is_component_build) {
+ common_optimize_on_ldflags += [ "/OPT:ICF" ] # Redundant COMDAT folding.
+ }
+
+ if (is_official_build) {
+ common_optimize_on_ldflags += [ "/OPT:REF" ] # Remove unreferenced data.
+
+ if (!use_lld && !is_clang) {
+ common_optimize_on_ldflags += [
+ # Set the number of LTCG code-gen threads to eight. The default is four.
+ # This gives a 5-10% link speedup.
+ "/cgthreads:8",
+ ]
+ if (use_incremental_wpo) {
+ # Incremental Link-time code generation.
+ common_optimize_on_ldflags += [ "/LTCG:INCREMENTAL" ]
+ } else {
+ common_optimize_on_ldflags += [ "/LTCG" ] # Link-time code generation.
+ }
+ }
+ }
+} else {
+ common_optimize_on_cflags = []
+ common_optimize_on_ldflags = []
+
+ if (is_ohos) {
+ common_optimize_on_ldflags += [
+ # Warn in case of text relocations.
+ "-Wl,--warn-shared-textrel",
+ ]
+ }
+
+ if (is_mac || is_ios) {
+ if (symbol_level == 2) {
+ # Mac dead code stripping requires symbols.
+ common_optimize_on_ldflags += [ "-Wl,-dead_strip" ]
+ }
+ } else if (current_os != "aix") {
+ # Non-Mac Posix flags.
+ # Aix does not support these.
+
+ common_optimize_on_cflags += [
+ # Don't emit the GCC version ident directives, they just end up in the
+ # .comment section taking up binary size.
+ "-fno-ident",
+
+ # Put data and code in their own sections, so that unused symbols
+ # can be removed at link time with --gc-sections.
+ "-fdata-sections",
+ "-ffunction-sections",
+ ]
+
+ common_optimize_on_ldflags += [
+ # Specifically tell the linker to perform optimizations.
+ # See http://lwn.net/Articles/192624/ .
+ # -O2 enables string tail merge optimization in gold and lld.
+ "-Wl,-O2",
+ ]
+ if (!is_mingw) {
+ common_optimize_on_ldflags += [ "-Wl,--gc-sections" ]
+ }
+ }
+}
+
+config("default_stack_frames") {
+ if (is_posix) {
+ if (enable_frame_pointers) {
+ cflags = [ "-fno-omit-frame-pointer" ]
+ } else {
+ cflags = [ "-fomit-frame-pointer" ]
+ }
+ }
+ # On Windows, the flag to enable framepointers "/Oy-" must always come after
+ # the optimization flag [e.g. "/O2"]. The optimization flag is set by one of
+ # the "optimize" configs, see rest of this file. The ordering that cflags are
+ # applied is well-defined by the GN spec, and there is no way to ensure that
+ # cflags set by "default_stack_frames" is applied after those set by an
+ # "optimize" config. Similarly, there is no way to propagate state from this
+ # config into the "optimize" config. We always apply the "/Oy-" flag in the
+ # common_optimize_on_cflags definition, even though this may not be correct.
+}
+
+# Default "optimization on" config.
+config("optimize") {
+ if (optimize_for_size && !is_nacl) {
+ # Favor size over speed.
+ if (is_clang) {
+ cflags = [ "-O2" ] + common_optimize_on_cflags
+ } else {
+ cflags = [ "-Os" ] + common_optimize_on_cflags
+ }
+ } else {
+ cflags = [ "-O2" ] + common_optimize_on_cflags
+ }
+ ldflags = common_optimize_on_ldflags
+}
+
+# Same config as 'optimize' but without the WPO flag.
+config("optimize_no_wpo") {
+ if (is_win) {
+ # Favor size over speed, /O1 must be before the common flags. The GYP
+ # build also specifies /Os and /GF but these are implied by /O1.
+ cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ]
+ } else if (optimize_for_size && !is_nacl) {
+ # Favor size over speed.
+ if (is_clang) {
+ cflags = [ "-Oz" ] + common_optimize_on_cflags
+ } else {
+ cflags = [ "-Os" ] + common_optimize_on_cflags
+ }
+ } else if (optimize_for_fuzzing) {
+ cflags = [ "-O0" ] + common_optimize_on_cflags
+ } else {
+ cflags = [ "-O2" ] + common_optimize_on_cflags
+ }
+ ldflags = common_optimize_on_ldflags
+}
+
+# Turn off optimizations.
+config("no_optimize") {
+ if (is_win) {
+ cflags = [
+ "/Od", # Disable optimization.
+ "/Ob0", # Disable all inlining (on by default).
+ "/GF", # Enable string pooling (off by default).
+ ]
+ } else if (is_ohos) {
+ # On ohos we kind of optimize some things that don't affect debugging
+ # much even when optimization is disabled to get the binary size down.
+ if (is_clang) {
+ cflags = [ "-Oz" ] + common_optimize_on_cflags
+ } else {
+ cflags = [ "-Os" ] + common_optimize_on_cflags
+ }
+ } else {
+ cflags = [ "-O0" ]
+ ldflags = []
+ }
+}
+
+# Turns up the optimization level. On Windows, this implies whole program
+# optimization and link-time code generation which is very expensive and should
+# be used sparingly.
+config("optimize_max") {
+ if (is_nacl && is_nacl_irt) {
+ # The NaCl IRT is a special case and always wants its own config.
+ # Various components do:
+ # if (!is_debug) {
+ # configs -= [ "//build/config/compiler:default_optimization" ]
+ # configs += [ "//build/config/compiler:optimize_max" ]
+ # }
+ # So this config has to have the selection logic just like
+ # "default_optimization", below.
+ configs = [ "//build/config/nacl:irt_optimize" ]
+ } else {
+ ldflags = common_optimize_on_ldflags
+ if (is_win) {
+ # Favor speed over size, /O2 must be before the common flags. The GYP
+ # build also specifies /Ot, /Oi, and /GF, but these are implied by /O2.
+ cflags = [ "/O2" ] + common_optimize_on_cflags
+
+ if (is_official_build) {
+ if (!is_clang) {
+ cflags += [
+ "/GL", # Whole program optimization.
+
+ # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
+ # Probably anything that this would catch that wouldn't be caught
+ # in a normal build isn't going to actually be a bug, so the
+ # incremental value of C4702 for PGO builds is likely very small.
+ "/wd4702",
+ ]
+ }
+ }
+ } else if (optimize_for_fuzzing) {
+ cflags = [ "-O0" ] + common_optimize_on_cflags
+ } else {
+ cflags = [ "-O2" ] + common_optimize_on_cflags
+ }
+ }
+}
+
+# This config can be used to override the default settings for per-component
+# and whole-program optimization, optimizing the particular target for speed
+# instead of code size. This config is exactly the same as "optimize_max"
+# except that we use -O3 instead of -O2 on non-win, non-IRT platforms.
+config("optimize_speed") {
+ if (is_nacl && is_nacl_irt) {
+ # The NaCl IRT is a special case and always wants its own config.
+ # Various components do:
+ # if (!is_debug) {
+ # configs -= [ "//build/config/compiler:default_optimization" ]
+ # configs += [ "//build/config/compiler:optimize_max" ]
+ # }
+ # So this config has to have the selection logic just like
+ # "default_optimization", below.
+ configs = [ "//build/config/nacl:irt_optimize" ]
+ } else {
+ ldflags = common_optimize_on_ldflags
+ if (is_win) {
+ # Favor speed over size, /O2 must be before the common flags. The GYP
+ # build also specifies /Ot, /Oi, and /GF, but these are implied by /O2.
+ cflags = [ "/O2" ] + common_optimize_on_cflags
+
+ if (is_official_build && !is_clang) {
+ cflags += [
+ "/GL", # Whole program optimization.
+
+ # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
+ # Probably anything that this would catch that wouldn't be caught in a
+ # normal build isn't going to actually be a bug, so the incremental
+ # value of C4702 for PGO builds is likely very small.
+ "/wd4702",
+ ]
+ }
+ } else if (optimize_for_fuzzing) {
+ cflags = [ "-O0" ] + common_optimize_on_cflags
+ } else {
+ cflags = [ "-O3" ] + common_optimize_on_cflags
+ }
+ }
+}
+
+config("optimize_fuzzing") {
+ cflags = [ "-O0" ] + common_optimize_on_cflags
+ ldflags = common_optimize_on_ldflags
+ visibility = [ ":default_optimization" ]
+}
+
+# The default optimization applied to all targets. This will be equivalent to
+# either "optimize" or "no_optimize", depending on the build flags.
+config("default_optimization") {
+ if (is_nacl && is_nacl_irt) {
+ # The NaCl IRT is a special case and always wants its own config.
+ # It gets optimized the same way regardless of the type of build.
+ configs = [ "//build/config/nacl:irt_optimize" ]
+ } else if (is_debug) {
+ configs = [ ":no_optimize" ]
+ } else if (optimize_for_fuzzing) {
+ assert(!is_win, "Fuzzing optimize level not supported on Windows")
+
+ # Coverage builds are quite slow. Using "optimize_for_fuzzing" makes them
+ # even slower, as it drops the optimization level to "-O0". Prevent that
+ # from happening.
+ assert(!use_clang_coverage,
+ "optimize_for_fuzzing=true should not be used with " +
+ "use_clang_coverage=true.")
+ configs = [ ":optimize_fuzzing" ]
+ } else {
+ configs = [ ":optimize" ]
+ }
+}
+
+_clang_sample_profile = ""
+if (is_clang && current_toolchain == default_toolchain) {
+ if (clang_sample_profile_path != "") {
+ _clang_sample_profile = clang_sample_profile_path
+ } else if (clang_use_default_sample_profile) {
+ assert(build_with_chromium,
+ "Our default profiles currently only apply to Chromium")
+ assert(is_ohos || is_desktop_linux,
+ "The current platform has no default profile")
+ _clang_sample_profile = ""
+ }
+}
+
+# Clang offers a way to assert that AFDO profiles are accurate, which causes it
+# to optimize functions not represented in a profile more aggressively for size.
+# This config can be toggled in cases where shaving off binary size hurts
+# performance too much.
+config("afdo_optimize_size") {
+ if (_clang_sample_profile != "" && sample_profile_is_accurate) {
+ cflags = [ "-fprofile-sample-accurate" ]
+ }
+}
+
+# GCC and clang support a form of profile-guided optimization called AFDO.
+# There are some targeted places that AFDO regresses (and an icky interaction
+# between //base/allocator:tcmalloc and AFDO on GCC), so we provide a separate
+# config to allow AFDO to be disabled per-target.
+config("afdo") {
+ if (is_clang) {
+ if (_clang_sample_profile != "") {
+ rebased_clang_sample_profile =
+ rebase_path(_clang_sample_profile, root_build_dir)
+ cflags = [ "-fprofile-sample-use=${rebased_clang_sample_profile}" ]
+ inputs = [ _clang_sample_profile ]
+ }
+ } else if (auto_profile_path != "" &&
+ current_toolchain == default_toolchain) {
+ cflags = [ "-fauto-profile=${auto_profile_path}" ]
+ inputs = [ auto_profile_path ]
+ }
+}
+
+# Symbols ----------------------------------------------------------------------
+
+# The BUILDCONFIG file sets the "default_symbols" config on targets by
+# default. It will be equivalent to one of the three specific symbol levels.
+#
+# You can override the symbol level on a per-target basis by removing the
+# default config and then adding the named one you want:
+#
+# configs -= [ "//build/config/compiler:default_symbols" ]
+# configs += [ "//build/config/compiler:symbols" ]
+
+# A helper config that all configs passing /DEBUG to the linker should
+# include as sub-config.
+config("win_pdbaltpath") {
+ visibility = [
+ ":minimal_symbols",
+ ":symbols",
+ ]
+
+ # /DEBUG causes the linker to generate a pdb file, and to write the absolute
+ # path to it in the executable file it generates. This flag turns that
+ # absolute path into just the basename of the pdb file, which helps with
+ # build reproducibility. Debuggers look for pdb files next to executables,
+ # so there's no downside to always using this.
+ ldflags = [ "/pdbaltpath:%_PDB%" ]
+}
+
+# Full symbols.
+config("symbols") {
+ if (is_win) {
+ if (is_clang) {
+ # Note that with VC++ this requires is_win_fastlink, enforced elsewhere.
+ cflags = [ "/Z7" ] # Debug information in the .obj files.
+ } else {
+ cflags = [ "/Zi" ] # Produce PDB file, no edit and continue.
+ }
+
+ if (is_win_fastlink && !use_lld) {
+ # Tell VS 2015+ to create a PDB that references debug information in .obj
+ # and .lib files instead of copying it all. This flag is incompatible with
+ # /PROFILE.
+ ldflags = [ "/DEBUG:FASTLINK" ]
+ } else if (is_clang && use_lld && use_ghash) {
+ cflags += [
+ "-mllvm",
+ "-emit-codeview-ghash-section",
+ ]
+ ldflags = [ "/DEBUG:GHASH" ]
+ } else {
+ ldflags = [ "/DEBUG" ]
+ }
+
+ # All configs using /DEBUG should include this:
+ configs = [ ":win_pdbaltpath" ]
+
+ if (is_clang) {
+ # /DEBUG:FASTLINK requires every object file to have standalone debug
+ # information.
+ if (is_win_fastlink && !use_lld) {
+ cflags += [ "-fstandalone-debug" ]
+ } else {
+ cflags += [ "-fno-standalone-debug" ]
+ }
+ }
+ } else {
+ if (is_mac || is_ios) {
+ cflags = [ "-gdwarf-2" ]
+ if (is_mac && enable_dsyms) {
+ # If generating dSYMs, specify -fno-standalone-debug. This was
+ # originally specified for https://crbug.com/479841 because dsymutil
+ # could not handle a 4GB dSYM file. But dsymutil from Xcodes prior to
+ # version 7 also produces debug data that is incompatible with Breakpad
+ # dump_syms, so this is still required (https://crbug.com/622406).
+ cflags += [ "-fno-standalone-debug" ]
+ }
+ } else {
+ cflags = []
+ if (!use_debug_fission && current_cpu == "arm") {
+ # dump_syms has issues with dwarf4 on arm, https://crbug.com/744956
+ #
+ # debug fission needs DWARF DIEs to be emitted at version 4.
+ # Chrome OS emits Debug Frame in DWARF1 to make breakpad happy. [1]
+ # Unless ohos needs debug fission, DWARF3 is the simplest solution.
+ #
+ # [1] crrev.com/a81d5ade0b043208e06ad71a38bcf9c348a1a52f
+ cflags += [ "-gdwarf-3" ]
+ }
+ cflags += [ "-g2" ]
+ }
+ if (use_debug_fission && !is_nacl && !is_ohos) {
+ # NOTE: Some Chrome OS builds globally set |use_debug_fission| to true,
+ # but they also build some targets against ohos toolchains which aren't
+ # compatible with it.
+ cflags += [ "-gsplit-dwarf" ]
+ }
+ asmflags = cflags
+ ldflags = []
+
+ if (!is_mac && !is_ios && !is_nacl && current_cpu != "x86" &&
+ (use_gold || use_lld)) {
+ if (is_clang) {
+ # This flag enables the GNU-format pubnames and pubtypes sections,
+ # which lld needs in order to generate a correct GDB index.
+ cflags += [ "-ggnu-pubnames" ]
+ }
+ ldflags += [ "-Wl,--gdb-index" ]
+ }
+ }
+}
+
+# Minimal symbols.
+# This config guarantees that enough symbols are kept for the stack traces
+# shown to the user when a crash happens in unittests running on the buildbot.
+config("minimal_symbols") {
+ if (is_win) {
+ # Linker symbols for backtraces only.
+ cflags = []
+ ldflags = [ "/DEBUG" ]
+
+ # All configs using /DEBUG should include this:
+ configs = [ ":win_pdbaltpath" ]
+
+ # For win/asan, get stack traces with full line numbers.
+ # AddressSanitizerTests.TestAddressSanitizer needs this, and since
+ # win/asan isn't a default cq bot the build time hit is ok.
+ if (is_clang && using_sanitizer) {
+ # -gline-tables-only is the same as -g1, but clang-cl only exposes the
+ # former.
+ cflags += [ "-gline-tables-only" ]
+ }
+ } else {
+ cflags = []
+ if (current_cpu == "arm") {
+ # dump_syms has issues with dwarf4 on arm, https://crbug.com/744956
+ cflags += [ "-gdwarf-3" ]
+ }
+ cflags += [ "-g1" ]
+ ldflags = []
+ if (is_ohos && is_clang) {
+ # ohos defaults to symbol_level=1 builds in production builds
+ # (https://crbug.com/648948), but clang, unlike gcc, doesn't emit
+ # DW_AT_linkage_name in -g1 builds. -fdebug-info-for-profiling enables
+ # that (and a bunch of other things we don't need), so that we get
+ # qualified names in stacks.
+ cflags += [ "-fdebug-info-for-profiling" ]
+ }
+
+ # Note: -gsplit-dwarf implicitly turns on -g2 with clang, so don't pass it.
+ asmflags = cflags
+ }
+}
+
+# No symbols.
+config("no_symbols") {
+ if (!is_win) {
+ cflags = [ "-g0" ]
+ asmflags = cflags
+ }
+}
+
+# Default symbols.
+config("default_symbols") {
+ if (symbol_level == 0) {
+ configs = [ ":no_symbols" ]
+ } else if (symbol_level == 1) {
+ configs = [ ":minimal_symbols" ]
+ } else if (symbol_level == 2) {
+ configs = [ ":symbols" ]
+ } else {
+ assert(false)
+ }
+
+ # This config is removed by base unittests app.
+ if (is_ohos && is_clang && strip_debug_info) {
+ configs += [ ":strip_debug" ]
+ }
+}
+
+config("strip_debug") {
+ if (!defined(ldflags)) {
+ ldflags = []
+ }
+ ldflags += [ "-Wl,--strip-debug" ]
+}
+
+if (is_ios || is_mac) {
+ # On Mac and iOS, this enables support for ARC (automatic ref-counting).
+ # See http://clang.llvm.org/docs/AutomaticReferenceCounting.html.
+ config("enable_arc") {
+ common_flags = [ "-fobjc-arc" ]
+ cflags_objc = common_flags
+ cflags_objcc = common_flags
+ }
+}
+
+config("no_common") {
+ if (is_clang) {
+ cflags = [ "-fno-common" ]
+ asmflags = cflags
+ }
+}
diff --git a/dsoftbus/build/config/compiler/compiler.gni b/dsoftbus/build/config/compiler/compiler.gni
new file mode 100755
index 0000000000000000000000000000000000000000..b77100ace941e6c5d7c6c9d814e2621918f5d22c
--- /dev/null
+++ b/dsoftbus/build/config/compiler/compiler.gni
@@ -0,0 +1,198 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/toolchain.gni")
+
+import("//build/misc/overrides/build.gni")
+
+if (is_ohos) {
+ import("//build/config/ohos/abi.gni")
+}
+if (current_cpu == "arm" || current_cpu == "arm64") {
+ import("//build/config/arm.gni")
+}
+
+declare_args() {
+ # How many symbols to include in the build. This affects the performance of
+ # the build since the symbols are large and dealing with them is slow.
+ # 2 means regular build with symbols.
+ # 1 means minimal symbols, usually enough for backtraces only. Symbols with
+ # internal linkage (static functions or those in anonymous namespaces) may not
+ # appear when using this level.
+ # 0 means no symbols.
+ # -1 means auto-set according to debug/release and platform.
+ symbol_level = -1
+
+ # ohos-only: Strip the debug info of libraries within lib.unstripped to
+ # reduce size. As long as symbol_level > 0, this will still allow stacks to be
+ # symbolized.
+ strip_debug_info = false
+
+ # Compile in such a way as to enable profiling of the generated code. For
+ # example, don't omit the frame pointer and leave in symbols.
+ enable_profiling = false
+
+ # use_debug_fission: whether to use split DWARF debug info
+ # files. This can reduce link time significantly, but is incompatible
+ # with some utilities such as icecc and ccache. Requires gold and
+ # gcc >= 4.8 or clang.
+ # http://gcc.gnu.org/wiki/DebugFission
+ #
+ # This is a placeholder value indicating that the code below should set
+ # the default. This is necessary to delay the evaluation of the default
+ # value expression until after its input values such as use_gold have
+ # been set, e.g. by a toolchain_args() block.
+ use_debug_fission = "default"
+
+ # Enables support for ThinLTO, which links 3x-10x faster than full LTO. See
+ # also http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html
+ use_thin_lto = is_cfi || (is_ohos && is_official_build)
+
+ # Tell VS to create a PDB that references information in .obj files rather
+ # than copying it all. This should improve linker performance. mspdbcmf.exe
+ # can be used to convert a fastlink pdb to a normal one.
+ is_win_fastlink = false
+
+ # Whether or not we should turn on incremental WPO. Only affects the VS
+ # Windows build.
+ use_incremental_wpo = false
+
+ # Whether or not we should use position independent code.
+ use_pic = true
+
+ # Whether we're using a sample profile collected on an architecture different
+ # than the one we're compiling for.
+ #
+ # It's currently not possible to collect AFDO profiles on anything but
+ # x86{,_64}.
+ using_mismatched_sample_profile = current_cpu != "x64" && current_cpu != "x86"
+}
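+
+# For reference, the arguments above can be overridden from the build's
+# args.gn file; the values below are purely illustrative, not project
+# defaults:
+#
+#   symbol_level = 1          # Minimal symbols, enough for backtraces.
+#   enable_profiling = true   # Keep frame pointers and symbols.
+#   use_thin_lto = false      # Opt out of ThinLTO.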
+
+assert(!is_cfi || use_thin_lto, "CFI requires ThinLTO")
+
+# If true, optimize for size. Does not affect windows builds.
+# Linux & Mac favor speed over size.
+optimize_for_size = is_ohos || is_ios
+
+declare_args() {
+ # Whether we should consider the profile we're using to be accurate. Accurate
+ # profiles have the benefit of (potentially substantial) binary size
+ # reductions, by instructing the compiler to optimize cold and uncovered
+ # functions heavily for size. This often comes at the cost of performance.
+ sample_profile_is_accurate = optimize_for_size
+}
+
+# Determine whether to enable or disable frame pointers, based on the platform
+# and build arguments.
+if (is_mac || is_ios || is_linux) {
+ enable_frame_pointers = true
+} else if (is_win) {
+ # 64-bit Windows ABI doesn't support frame pointers.
+ if (current_cpu == "x64") {
+ enable_frame_pointers = false
+ } else {
+ enable_frame_pointers = true
+ }
+} else if (is_chromeos) {
+ # ChromeOS generally prefers frame pointers, to support CWP.
+ # However, Clang does not currently generate usable frame pointers in ARM
+ # 32-bit builds (https://bugs.llvm.org/show_bug.cgi?id=18505) so disable them
+ # there to avoid the unnecessary overhead.
+ enable_frame_pointers = current_cpu != "arm"
+} else if (is_ohos) {
+ enable_frame_pointers =
+ enable_profiling ||
+ # Ensure that stacks from arm64 crash dumps are usable (crbug.com/391706).
+ current_cpu == "arm64" ||
+ # For x86 ohos, unwind tables are huge without frame pointers
+ # (crbug.com/762629). Enabling frame pointers grows the code size slightly
+ # but overall shrinks binaries considerably by avoiding huge unwind
+ # tables.
+ (current_cpu == "x86" && !exclude_unwind_tables && optimize_for_size) ||
+ using_sanitizer
+} else {
+ # Explicitly ask for frame pointers, otherwise:
+ # * Stacks may be missing for sanitizer and profiling builds.
+ # * Debug tcmalloc can crash (crbug.com/636489).
+ enable_frame_pointers = using_sanitizer || enable_profiling || is_debug
+}
+
+# In general assume that if we have frame pointers then we can use them to
+# unwind the stack. However, this requires that they are enabled by default for
+# most translation units, that they are emitted correctly, and that the
+# compiler or platform provides a way to access them.
+can_unwind_with_frame_pointers = enable_frame_pointers
+if (current_cpu == "arm" && arm_use_thumb) {
+ # We cannot currently unwind ARM Thumb frame pointers correctly.
+ # See https://bugs.llvm.org/show_bug.cgi?id=18505
+ can_unwind_with_frame_pointers = false
+} else if (is_win) {
+ # Windows 32-bit does provide frame pointers, but the compiler does not
+ # provide intrinsics to access them, so we don't use them.
+ can_unwind_with_frame_pointers = false
+}
+
+assert(!can_unwind_with_frame_pointers || enable_frame_pointers)
+
+# Unwinding with CFI table is only possible on static library builds and
+# required only when frame pointers are not enabled.
+can_unwind_with_cfi_table = is_ohos && !is_component_build &&
+ !enable_frame_pointers && current_cpu == "arm"
+
+declare_args() {
+ # Set to true to use lld, the LLVM linker. This flag may be used on Windows
+ # or Linux.
+ use_lld = is_clang &&
+ (is_win || (use_thin_lto && target_os != "chromeos") ||
+ (is_linux && current_cpu == "x64" && target_os != "chromeos") ||
+ (is_ohos && (current_cpu != "arm" || arm_version >= 7)))
+}
+
+declare_args() {
+ # Whether to use the gold linker from binutils instead of lld or bfd.
+ use_gold = !use_lld &&
+ ((is_linux && (current_cpu == "x64" || current_cpu == "x86" ||
+ current_cpu == "arm")) ||
+ (is_ohos && (current_cpu == "x86" || current_cpu == "x64" ||
+ current_cpu == "arm" || current_cpu == "arm64")))
+}
+
+# If it wasn't manually set, set to an appropriate default.
+assert(symbol_level >= -1 && symbol_level <= 2, "Invalid symbol_level")
+if (symbol_level == -1) {
+ if (is_ohos && use_order_profiling) {
+ # With instrumentation enabled, debug info puts libchrome.so over 4gb, which
+ # causes the linker to produce an invalid ELF. http://crbug.com/574476
+ symbol_level = 0
+ } else if (is_ohos && !is_component_build &&
+ !(ohos_64bit_target_cpu && !build_app_secondary_abi)) {
+ # Reduce symbol level when it will cause invalid elf files to be created
+ # (due to file size). https://crbug.com/648948.
+ symbol_level = 1
+ } else if ((!is_nacl && !is_linux) || is_debug || is_official_build) {
+ # Linux builds are slower when symbols are part of the target binary,
+ # whereas Mac and Windows keep them separate, so in release Linux builds
+ # default them off, but keep them on for official builds.
+ symbol_level = 2
+ } else if (using_sanitizer) {
+ # Sanitizers need line table info for stack traces. They don't need type
+ # info or variable info, so we can leave that out to speed up the build.
+ # Sanitizers also require symbols for filename suppressions to work.
+ symbol_level = 1
+ } else {
+ symbol_level = 0
+ }
+}
+
+# Assert that the configuration isn't going to hit https://crbug.com/648948.
+# An exception is made when target_os == "chromeos" as we only use the ohos
+# toolchain there to build relatively small binaries.
+assert(ignore_elf32_limitations || !is_ohos || target_os == "chromeos" ||
+ (ohos_64bit_target_cpu && !build_app_secondary_abi) ||
+ is_component_build || symbol_level < 2,
+ "ohos 32-bit non-component builds cannot have symbol_level=2 " +
+ "due to 4GiB file size limit, see https://crbug.com/648948. " +
+ "If you really want to try this out, " +
+ "set ignore_elf32_limitations=true.")
diff --git a/dsoftbus/build/config/coverage/BUILD.gn b/dsoftbus/build/config/coverage/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..c11cbecec949bf51beada78bc1a7b61ff7ba58e6
--- /dev/null
+++ b/dsoftbus/build/config/coverage/BUILD.gn
@@ -0,0 +1,38 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/coverage/coverage.gni")
+
+config("default_coverage") {
+ if (use_clang_coverage) {
+ cflags = [
+ "--coverage",
+
+ # The following experimental flag removes unused header functions from the
+ # coverage mapping data embedded in the test binaries. The resulting
+ # reduction in binary size makes it possible to build Chrome's large unit
+ # test targets on macOS. Please refer to crbug.com/796290 for more details.
+ "-mllvm",
+ "-limited-coverage-experimental=true",
+ ]
+
+ ldflags = []
+ if (!is_win) {
+ ldflags = [ "--coverage" ]
+ cflags += [ "-fno-use-cxa-atexit" ]
+ } else {
+ # Windows directly calls link.exe instead of the compiler driver when
+ # linking, so pass the profile runtime libraries explicitly rather than
+ # relying on a compiler flag to add them.
+ if (target_cpu == "x64") {
+ ldflags += [ "clang_rt.profile-x86_64.lib" ]
+ } else if (target_cpu == "x86") {
+ ldflags += [ "clang_rt.profile-i386.lib" ]
+ } else {
+ assert(false,
+ "use_clang_coverage=true not supported yet for this target_cpu")
+ }
+ }
+ }
+}
diff --git a/dsoftbus/build/config/coverage/coverage.gni b/dsoftbus/build/config/coverage/coverage.gni
new file mode 100755
index 0000000000000000000000000000000000000000..61716dd425a7d39980b026ba3a6dde7bf62e6e6a
--- /dev/null
+++ b/dsoftbus/build/config/coverage/coverage.gni
@@ -0,0 +1,13 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+ # Enable Clang's Source-based Code Coverage.
+ use_clang_coverage = false
+}
+
+assert(!use_clang_coverage || is_clang,
+ "Clang Source-based Code Coverage requires clang.")
diff --git a/dsoftbus/build/config/deps_revisions.gni b/dsoftbus/build/config/deps_revisions.gni
new file mode 100755
index 0000000000000000000000000000000000000000..4b2c00fb72e44df17d980785a25891a570ce66ec
--- /dev/null
+++ b/dsoftbus/build/config/deps_revisions.gni
@@ -0,0 +1,12 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+ # The svn revisions that belong to the git hashes in DEPS. Used to cause full
+ # rebuilds on libc++ rolls.
+ clang_format_svn_revision = "302580"
+ libcxx_svn_revision = "344254"
+ libcxxabi_svn_revision = "344215"
+ libunwind_svn_revision = "343990"
+}
diff --git a/dsoftbus/build/config/features.gni b/dsoftbus/build/config/features.gni
new file mode 100755
index 0000000000000000000000000000000000000000..239ae17f65645a7b53ef8eae8c2d5736dcf19468
--- /dev/null
+++ b/dsoftbus/build/config/features.gni
@@ -0,0 +1,28 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================
+# PLEASE DO NOT ADD MORE FLAGS TO THIS FILE
+# =============================================
+#
+# These flags are effectively global. Your feature flag should go near the
+# code it controls. Most of these items are here now because they control
+# legacy global #defines passed to the compiler (now replaced with generated
+# buildflag headers -- see //build/buildflag_header.gni).
+#
+# There is more advice on where to put build flags in the "Build flag" section
+# of //build/config/BUILDCONFIG.gn.
+
+if (is_ohos) {
+ import("//build/config/ohos/config.gni")
+}
+
+declare_args() {
+ # libudev usage. This currently only affects the content layer.
+ use_udev = is_linux
+
+ use_dbus = is_linux
+
+ use_gio = is_linux
+}
diff --git a/dsoftbus/build/config/gcc/BUILD.gn b/dsoftbus/build/config/gcc/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..b2b1c8ebe06c080fdf1b6a457ad998a22d59aca6
--- /dev/null
+++ b/dsoftbus/build/config/gcc/BUILD.gn
@@ -0,0 +1,129 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+ # When non-empty, overrides the target rpath value. This allows a user to
+ # make a Chromium build where binaries and shared libraries are meant to be
+ # installed into separate directories, like /usr/bin/chromium and
+ # /usr/lib/chromium for instance. It is useful when a build system that
+ # generates a whole target root filesystem (like Yocto) is used on top of gn,
+ # especially when cross-compiling.
+ # Note: this gn arg is similar to gyp target_rpath generator flag.
+ gcc_target_rpath = ""
+ ldso_path = ""
+}
+
+# This config causes functions not to be automatically exported from shared
+# libraries. By default, all symbols are exported but this means there are
+# lots of exports that slow everything down. In general we explicitly mark
+# which functions we want to export from components.
+#
+# Some third_party code assumes all functions are exported so this is separated
+# into its own config so such libraries can remove this config to make symbols
+# public again.
+#
+# See http://gcc.gnu.org/wiki/Visibility
+config("symbol_visibility_hidden") {
+ cflags = [ "-fvisibility=hidden" ]
+
+ # Visibility attribute is not supported on AIX.
+ if (current_os != "aix") {
+ cflags_cc = [ "-fvisibility-inlines-hidden" ]
+ cflags_objcc = cflags_cc
+ }
+}
+
+config("symbol_visibility_inline_hidden") {
+ cflags_cc = [ "-fvisibility-inlines-hidden" ]
+}
+
+# This config is usually set when :symbol_visibility_hidden is removed.
+# It's often a good idea to set visibility explicitly, as there are flags
+# which would error out otherwise (e.g. -fsanitize=cfi-unrelated-cast).
+config("symbol_visibility_default") {
+ cflags = [ "-fvisibility=default" ]
+}
+
+# The rpath is the dynamic library search path. Setting this config on a link
+# step will put the directory where the build generates shared libraries into
+# the rpath.
+#
+# This is required for component builds since the build generates many shared
+# libraries in the build directory that we expect to be automatically loaded.
+# It will be automatically applied in this case by :executable_ldconfig.
+#
+# In non-component builds, certain test binaries may expect to load dynamic
+# libraries from the current directory. As long as these aren't distributed,
+# this is OK. For these cases use something like this:
+#
+# if (is_linux && !is_component_build) {
+# configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ]
+# }
+config("rpath_for_built_shared_libraries") {
+ if (!is_ohos) {
+ # Note: ohos doesn't support rpath.
+ rpath_link = "."
+ if (current_toolchain != default_toolchain || gcc_target_rpath == "") {
+ ldflags = [
+ # Want to pass "\$". GN will re-escape as required for ninja.
+ "-Wl,-rpath=\$ORIGIN/${rpath_link}",
+ "-Wl,-rpath-link=${rpath_link}",
+ ]
+ } else {
+ ldflags = [
+ "-Wl,-rpath=${gcc_target_rpath}",
+ "-Wl,-rpath-link=${rpath_link}",
+ ]
+ }
+ }
+
+ if (!is_ohos) {
+ if (!defined(ldflags)) {
+ ldflags = []
+ }
+ if (current_toolchain == default_toolchain && ldso_path != "") {
+ ldflags += [ "-Wl,--dynamic-linker=${ldso_path}" ]
+ }
+ }
+}
+
+# Settings for executables.
+config("executable_ldconfig") {
+ ldflags = []
+
+ if (is_ohos) {
+ ldflags += [
+ "-Bdynamic",
+ "-Wl,-z,nocopyreloc",
+ ]
+ }
+
+ if (!is_ohos) {
+ # See the rpath_for... config above for why this is necessary for component
+ # builds.
+ if (is_component_build) {
+ configs = [ ":rpath_for_built_shared_libraries" ]
+ }
+ }
+
+ if (!is_ohos && current_os != "aix") {
+ # Find the path containing shared libraries for this toolchain
+ # relative to the build directory. ${root_out_dir} will be a
+ # subdirectory of ${root_build_dir} when cross compiling.
+ _rpath_link = rebase_path(root_out_dir, root_build_dir)
+ ldflags += [
+ "-Wl,-rpath-link=$_rpath_link",
+
+ # Newer binutils don't set DT_RPATH unless you disable "new" dtags
+ # and the new DT_RUNPATH doesn't work without --no-as-needed flag.
+ "-Wl,--disable-new-dtags",
+ ]
+ }
+}
diff --git a/dsoftbus/build/config/linux/BUILD.gn b/dsoftbus/build/config/linux/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..bc30de9ce465fb2b3c129e5bc4c2f01ec3075274
--- /dev/null
+++ b/dsoftbus/build/config/linux/BUILD.gn
@@ -0,0 +1,107 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+
+group("linux") {
+ visibility = [ "//:optimize_gn_gen" ]
+}
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic that is
+# Linux-only.
+config("compiler") {
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Linux-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+ # Set here because OS_CHROMEOS cannot be autodetected in build_config.h like OS_LINUX and the like.
+ if (is_chromeos) {
+ defines = [ "OS_CHROMEOS" ]
+ }
+ libs = []
+ if ((!is_chromeos || default_toolchain != "//build/toolchain/cros:target") &&
+ !use_custom_libcxx) {
+ libs += [ "atomic" ]
+ }
+
+ # double framework, use prebuilt libc++.
+ if (is_double_framework) {
+ if (current_cpu == "x86") {
+ libs += [ "${asdk_libs_dir}/ndk/libcxx/linux_x86/libc++.so" ]
+ } else {
+ libs += [ "${asdk_libs_dir}/ndk/libcxx/linux_x86_64/libc++.so" ]
+ }
+ }
+}
+
+config("executable_config") {
+ cflags = [ "-fPIE" ]
+ asmflags = [ "-fPIE" ]
+ ldflags = [ "-pie" ]
+}
+
+config("x11") {
+ libs = [
+ "X11",
+ "X11-xcb",
+ "xcb",
+ "Xcomposite",
+ "Xcursor",
+ "Xdamage",
+ "Xext",
+ "Xfixes",
+ "Xi",
+ "Xrender",
+ "Xtst",
+ ]
+}
+
+config("xcomposite") {
+ libs = [ "Xcomposite" ]
+}
+
+config("xext") {
+ libs = [ "Xext" ]
+}
+
+config("xrandr") {
+ libs = [ "Xrandr" ]
+}
+
+config("xscrnsaver") {
+ libs = [ "Xss" ]
+}
+
+config("xfixes") {
+ libs = [ "Xfixes" ]
+}
+
+config("libcap") {
+ libs = [ "cap" ]
+}
+
+config("xi") {
+ libs = [ "Xi" ]
+}
+
+config("xtst") {
+ libs = [ "Xtst" ]
+}
+
+config("libresolv") {
+ libs = [ "resolv" ]
+}
+
+# Ensures all exported symbols are added to the dynamic symbol table. This is
+# necessary to expose Chrome's custom operator new() and operator delete() (and
+# other memory-related symbols) to libraries. Otherwise, they might
+# (de)allocate memory on a different heap, which would spell trouble if pointers
+# to heap-allocated memory are passed over shared library boundaries.
+config("export_dynamic") {
+ ldflags = [ "-rdynamic" ]
+}
diff --git a/dsoftbus/build/config/linux/pkg-config.py b/dsoftbus/build/config/linux/pkg-config.py
new file mode 100755
index 0000000000000000000000000000000000000000..a6682b9fe875f60ae2034e4a5475273f2f48aad7
--- /dev/null
+++ b/dsoftbus/build/config/linux/pkg-config.py
@@ -0,0 +1,245 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import subprocess
+import sys
+import re
+from optparse import OptionParser
+
+# This script runs pkg-config, optionally filtering out some results, and
+# returns the result.
+#
+# The result will be [ <includes>, <cflags>, <libs>, <lib_dirs> ],
+# where each member is itself a list of strings.
+#
+# You can filter out matches using "-v <regexp>" where all results from
+# pkgconfig matching the given regular expression will be ignored. You can
+# specify more than one regular expression by specifying "-v" more than once.
+#
+# You can specify a sysroot using "-s <sysroot>" where sysroot is the absolute
+# system path to the sysroot used for compiling. This script will attempt to
+# generate correct paths for the sysroot.
+#
+# When using a sysroot, you must also specify the architecture via
+# "-a <arch>" where arch is either "x86" or "x64".
+#
+# Cross systemroots place pkgconfig files at <sysroot>/usr/share/pkgconfig
+# and one of <sysroot>/usr/lib/pkgconfig or <sysroot>/usr/lib64/pkgconfig,
+# depending on whether the systemroot is for a 32 or 64 bit architecture. They
+# specify the 'lib' or 'lib64' of the pkgconfig path by defining the
+# 'system_libdir' variable in the args.gn file. pkg_config.gni communicates
+# this variable to this script with the "--system_libdir <system_libdir>"
+# flag. If no flag is provided, then pkgconfig files are assumed to come from
+# <sysroot>/usr/lib/pkgconfig.
+#
+# Additionally, you can specify the option --atleast-version. This will skip
+# the normal outputting of the result array and instead print true or false,
+# depending on the return value of pkg-config for the given package.
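+#
+# Example (illustrative; the paths and package name are hypothetical):
+#   pkg-config.py -s /path/to/sysroot -a x64 --system_libdir lib64 glib-2.0
+# might print something like:
+#   [["/path/to/sysroot/usr/include/glib-2.0"], [], ["glib-2.0"], []]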
+
+
+def SetConfigPath(options):
+ """Set the PKG_CONFIG_LIBDIR environment variable.
+
+ This takes into account any sysroot and architecture specification from the
+ options on the given command line.
+ """
+
+ sysroot = options.sysroot
+ assert sysroot
+
+ # Compute the library path name based on the architecture.
+ arch = options.arch
+ if sysroot and not arch:
+ print("You must specify an architecture via -a if using a sysroot.")
+ sys.exit(1)
+
+ libdir = sysroot + '/usr/' + options.system_libdir + '/pkgconfig'
+ libdir += ':' + sysroot + '/usr/share/pkgconfig'
+ os.environ['PKG_CONFIG_LIBDIR'] = libdir
+ return libdir
+
+
+def GetPkgConfigPrefixToStrip(options, args):
+ """Returns the prefix from pkg-config where packages are installed.
+
+ This returned prefix is the one that should be stripped from the beginning of
+ directory names to take into account sysroots.
+ """
+ # Some sysroots, like the Chromium OS ones, may generate paths that are not
+ # relative to the sysroot. For example,
+ # /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all
+ # paths relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr)
+ # instead of relative to /path/to/chroot/build/x86-generic (i.e prefix=/usr).
+ # To support this correctly, it's necessary to extract the prefix to strip
+ # from pkg-config's |prefix| variable.
+ prefix = subprocess.check_output([options.pkg_config,
+ "--variable=prefix"] + args,
+ env=os.environ)
+ if isinstance(prefix, bytes):
+   prefix = prefix.decode()  # check_output returns bytes on Python 3.
+ prefix = prefix.strip()
+ # Strip a trailing '/usr' so that e.g. prefix=/build/x86-generic/usr yields
+ # the prefix to strip, /build/x86-generic.
+ if prefix.endswith('/usr'):
+   return prefix[:-4]
+ return prefix
+
+
+def MatchesAnyRegexp(flag, list_of_regexps):
+ """Returns true if the first argument matches any regular expression in the
+ given list."""
+ for regexp in list_of_regexps:
+ if regexp.search(flag) is not None:
+ return True
+ return False
+
+
+def RewritePath(path, strip_prefix, sysroot):
+ """Rewrites a path by stripping the prefix and prepending the sysroot."""
+ if os.path.isabs(path) and not path.startswith(sysroot):
+ if path.startswith(strip_prefix):
+ path = path[len(strip_prefix):]
+ path = path.lstrip('/')
+ return os.path.join(sysroot, path)
+ else:
+ return path
+
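+# For example (illustrative): RewritePath('/usr/lib/libfoo.so', '/usr',
+# '/path/to/sysroot') returns '/path/to/sysroot/lib/libfoo.so', while a
+# relative path such as 'lib/libfoo.so' is returned unchanged.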
+
+def main():
+ # If this is run on non-Linux platforms, just return nothing and indicate
+ # success. This allows us to "kind of emulate" a Linux build from other
+ # platforms.
+ if "linux" not in sys.platform:
+ print("[[],[],[],[],[]]")
+ return 0
+
+ parser = OptionParser()
+ parser.add_option('-d', '--debug', action='store_true')
+ parser.add_option('-p', action='store', dest='pkg_config', type='string',
+ default='pkg-config')
+ parser.add_option('-v', action='append', dest='strip_out', type='string')
+ parser.add_option('-s', action='store', dest='sysroot', type='string')
+ parser.add_option('-a', action='store', dest='arch', type='string')
+ parser.add_option('--system_libdir', action='store', dest='system_libdir',
+ type='string', default='lib')
+ parser.add_option('--atleast-version', action='store',
+ dest='atleast_version', type='string')
+ parser.add_option('--libdir', action='store_true', dest='libdir')
+ parser.add_option('--dridriverdir', action='store_true', dest='dridriverdir')
+ parser.add_option('--version-as-components', action='store_true',
+ dest='version_as_components')
+ (options, args) = parser.parse_args()
+
+ # Make a list of regular expressions to strip out.
+ strip_out = []
+ if options.strip_out is not None:
+ for regexp in options.strip_out:
+ strip_out.append(re.compile(regexp))
+
+ if options.sysroot:
+ libdir = SetConfigPath(options)
+ if options.debug:
+ sys.stderr.write('PKG_CONFIG_LIBDIR=%s\n' % libdir)
+ prefix = GetPkgConfigPrefixToStrip(options, args)
+ else:
+ prefix = ''
+
+ if options.atleast_version:
+ # When asking for the return value, just run pkg-config and print the
+ # return value, no need to do other work.
+ if not subprocess.call([options.pkg_config,
+ "--atleast-version=" + options.atleast_version] +
+ args):
+ print("true")
+ else:
+ print("false")
+ return 0
+
+ if options.version_as_components:
+ cmd = [options.pkg_config, "--modversion"] + args
+ try:
+   version_string = subprocess.check_output(cmd)
+ except (OSError, subprocess.CalledProcessError):
+   sys.stderr.write('Error from pkg-config.\n')
+   return 1
+ if isinstance(version_string, bytes):
+   version_string = version_string.decode()  # bytes under Python 3.
+ print(json.dumps(list(map(int, version_string.strip().split(".")))))
+ return 0
+
+ if options.libdir:
+ cmd = [options.pkg_config, "--variable=libdir"] + args
+ if options.debug:
+ sys.stderr.write('Running: %s\n' % cmd)
+ try:
+   libdir = subprocess.check_output(cmd)
+ except (OSError, subprocess.CalledProcessError):
+   print("Error from pkg-config.")
+   return 1
+ if isinstance(libdir, bytes):
+   libdir = libdir.decode()  # bytes under Python 3.
+ sys.stdout.write(libdir.strip())
+ return 0
+
+ if options.dridriverdir:
+ cmd = [options.pkg_config, "--variable=dridriverdir"] + args
+ if options.debug:
+ sys.stderr.write('Running: %s\n' % cmd)
+ try:
+   dridriverdir = subprocess.check_output(cmd)
+ except (OSError, subprocess.CalledProcessError):
+   print("Error from pkg-config.")
+   return 1
+ if isinstance(dridriverdir, bytes):
+   dridriverdir = dridriverdir.decode()  # bytes under Python 3.
+ sys.stdout.write(dridriverdir.strip())
+ return 0
+
+ cmd = [options.pkg_config, "--cflags", "--libs"] + args
+ if options.debug:
+ sys.stderr.write('Running: %s\n' % ' '.join(cmd))
+
+ try:
+   flag_string = subprocess.check_output(cmd)
+ except (OSError, subprocess.CalledProcessError):
+   sys.stderr.write('Could not run pkg-config.\n')
+   return 1
+ if isinstance(flag_string, bytes):
+   flag_string = flag_string.decode()  # bytes under Python 3.
+
+ # For now just split on spaces to get the args out. This will break if
+ # pkgconfig returns quoted things with spaces in them, but that doesn't seem
+ # to happen in practice.
+ all_flags = flag_string.strip().split(' ')
+
+ sysroot = options.sysroot
+ if not sysroot:
+ sysroot = ''
+
+ includes = []
+ cflags = []
+ libs = []
+ lib_dirs = []
+
+ for flag in all_flags[:]:
+ if len(flag) == 0 or MatchesAnyRegexp(flag, strip_out):
+ continue
+
+ if flag[:2] == '-l':
+ libs.append(RewritePath(flag[2:], prefix, sysroot))
+ elif flag[:2] == '-L':
+ lib_dirs.append(RewritePath(flag[2:], prefix, sysroot))
+ elif flag[:2] == '-I':
+ includes.append(RewritePath(flag[2:], prefix, sysroot))
+ elif flag[:3] == '-Wl':
+ # Don't allow libraries to control ld flags. These should be specified
+ # only in build files.
+ pass
+ elif flag == '-pthread':
+ # Many libs specify "-pthread" which we don't need since we always
+ # include this anyway. Removing it here prevents a bunch of duplicate
+ # inclusions on the command line.
+ pass
+ else:
+ cflags.append(flag)
+
+ # Output a GN array: [ includes, cflags, libs, lib_dirs ]. The JSON
+ # formatter prints GN-compatible lists when everything is a list of
+ # strings.
+ print(json.dumps([includes, cflags, libs, lib_dirs]))
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/config/linux/pkg_config.gni b/dsoftbus/build/config/linux/pkg_config.gni
new file mode 100755
index 0000000000000000000000000000000000000000..9d783c3eda58b7de253d9e3547de4b389a86e74c
--- /dev/null
+++ b/dsoftbus/build/config/linux/pkg_config.gni
@@ -0,0 +1,128 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+# Defines a config specifying the result of running pkg-config for the given
+# packages. Put the package names you want to query in the "packages" variable
+# inside the template invocation.
+#
+# You can also add defines via the "defines" variable. This can be useful to
+# add this to the config to pass defines that the library expects to get by
+# users of its headers.
+#
+# Example:
+# pkg_config("mything") {
+# packages = [ "mything1", "mything2" ]
+# defines = [ "ENABLE_AWESOME" ]
+# }
+#
+# You can also use "extra args" to filter out results (see pkg-config.py):
+# extra_args = [ "-v", "foo" ]
+# To ignore libs and ldflags (only cflags/defines will be set, which is useful
+# when doing manual dynamic linking), set:
+# ignore_libs = true
+
+declare_args() {
+ # A pkg-config wrapper to call instead of trying to find and call the right
+ # pkg-config directly. Wrappers like this are common in cross-compilation
+ # environments.
+ # Leaving it blank defaults to searching PATH for 'pkg-config' and relying on
+ # the sysroot mechanism to find the right .pc files.
+ pkg_config = ""
+
+ # An optional pkg-config wrapper to use for tools built on the host.
+ host_pkg_config = ""
+
+ # Cross systemroots place pkgconfig files at /usr/share/pkgconfig and one of
+ # /usr/lib/pkgconfig or /usr/lib64/pkgconfig, depending on whether the
+ # systemroot is for a 32 or 64 bit architecture.
+ #
+ # When building under GYP, cross board builds specify the 'system_libdir'
+ # variable as part of the GYP_DEFINES provided by the cross emerge build or
+ # simple chrome build scheme. This variable permits controlling this for GN
+ # builds in a similar fashion by setting the `system_libdir` variable in the
+ # build's args.gn file to 'lib' or 'lib64' as appropriate for the target
+ # architecture.
+ system_libdir = "lib"
+}
+
+pkg_config_script = "//build/config/linux/pkg-config.py"
+
+# Define the args we pass to the pkg-config script for other build files that
+# need to invoke it manually.
+pkg_config_args = []
+
+if (sysroot != "") {
+ # Pass the sysroot if we're using one (it requires the CPU arch also).
+ pkg_config_args += [
+ "-s",
+ rebase_path(sysroot),
+ "-a",
+ current_cpu,
+ ]
+}
+
+if (pkg_config != "") {
+ pkg_config_args += [
+ "-p",
+ pkg_config,
+ ]
+}
+
+# Only use the custom libdir when building with the target sysroot.
+if (target_sysroot != "" && sysroot == target_sysroot) {
+ pkg_config_args += [
+ "--system_libdir",
+ system_libdir,
+ ]
+}
+
+if (host_pkg_config != "") {
+ host_pkg_config_args = [
+ "-p",
+ host_pkg_config,
+ ]
+} else {
+ host_pkg_config_args = pkg_config_args
+}
+
+template("pkg_config") {
+ assert(defined(invoker.packages),
+ "Variable |packages| must be defined to be a list in pkg_config.")
+ config(target_name) {
+ if (host_toolchain == current_toolchain) {
+ args = host_pkg_config_args + invoker.packages
+ } else {
+ args = pkg_config_args + invoker.packages
+ }
+ if (defined(invoker.extra_args)) {
+ args += invoker.extra_args
+ }
+
+ pkgresult = exec_script(pkg_config_script, args, "value")
+ cflags = pkgresult[1]
+
+ foreach(include, pkgresult[0]) {
+ if (use_sysroot) {
+ # We want the system include paths to use -isystem instead of -I to
+ # suppress warnings in those headers.
+ include_relativized = rebase_path(include, root_build_dir)
+ cflags += [ "-isystem$include_relativized" ]
+ } else {
+ cflags += [ "-I$include" ]
+ }
+ }
+
+ if (!defined(invoker.ignore_libs) || !invoker.ignore_libs) {
+ libs = pkgresult[2]
+ lib_dirs = pkgresult[3]
+ }
+
+ forward_variables_from(invoker,
+ [
+ "defines",
+ "visibility",
+ ])
+ }
+}
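+
+# Example (illustrative): a target consumes the generated config via configs:
+#
+#   pkg_config("glib") {
+#     packages = [ "glib-2.0" ]
+#   }
+#
+#   executable("tool") {
+#     configs += [ ":glib" ]
+#   }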
diff --git a/dsoftbus/build/config/mac/BUILD.gn b/dsoftbus/build/config/mac/BUILD.gn
new file mode 100644
index 0000000000000000000000000000000000000000..a0a163425678ce049c62aca2a3ba3002d0793ad4
--- /dev/null
+++ b/dsoftbus/build/config/mac/BUILD.gn
@@ -0,0 +1,114 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+import("//build/config/mac/mac_sdk.gni")
+import("//build/config/mac/symbols.gni")
+import("//build/config/sysroot.gni")
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic.
+config("compiler") {
+ # These flags are shared between the C compiler and linker.
+ common_mac_flags = []
+
+ # CPU architecture.
+ if (current_cpu == "x64") {
+ common_mac_flags += [
+ "-arch",
+ "x86_64",
+ ]
+ } else if (current_cpu == "x86") {
+ common_mac_flags += [
+ "-arch",
+ "i386",
+ ]
+ }
+
+ # This is here so that all files get recompiled after an Xcode update.
+ # (defines are passed via the command line, and build systems rebuild things
+ # when their command line changes). Nothing should ever read this define.
+ defines = [ "CR_XCODE_VERSION=$xcode_version" ]
+
+ defines += [
+ "_LIBCPP_CONFIG_SITE",
+ "_LIBCPP_HAS_MERGED_TYPEINFO_NAMES_DEFAULT=0",
+ ]
+
+ asmflags = common_mac_flags
+ cflags = common_mac_flags
+
+ # Without this, the constructors and destructors of a C++ object inside
+ # an Objective C struct won't be called, which is very bad.
+ cflags_objcc = [ "-fobjc-call-cxx-cdtors" ]
+
+ ldflags = common_mac_flags
+
+ # Create a new read-only segment for protected memory. The default segments
+ # (__TEXT and __DATA) are mapped read-execute and read-write by default.
+ ldflags += [
+ "-segprot",
+ "PROTECTED_MEMORY",
+ "rw",
+ "r",
+ ]
+
+ if (save_unstripped_output) {
+ ldflags += [ "-Wcrl,unstripped," + rebase_path(root_out_dir) ]
+ }
+
+ if (export_libcxxabi_from_executables) {
+ ldflags += [ "-Wl,-undefined,dynamic_lookup" ]
+ }
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Mac-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+ common_flags = [
+ "-isysroot",
+ sysroot,
+ "-mmacosx-version-min=$mac_deployment_target",
+ ]
+
+ asmflags = common_flags
+ cflags = common_flags
+ ldflags = common_flags
+
+ # Prevent Mac OS X AssertMacros.h (included by system header) from defining
+ # macros that collide with common names, like 'check', 'require', and
+ # 'verify'.
+ # http://opensource.apple.com/source/CarbonHeaders/CarbonHeaders-18.1/AssertMacros.h
+ defines = [ "__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE=0" ]
+}
+
+# On Mac, this is used for everything except static libraries.
+config("mac_dynamic_flags") {
+ ldflags = [ "-Wl,-ObjC" ] # Always load Objective-C categories and classes.
+
+ if (is_component_build) {
+ ldflags += [
+ # Path for loading shared libraries for unbundled binaries.
+ "-Wl,-rpath,@loader_path/.",
+
+ # Path for loading shared libraries for bundled binaries. Get back from
+ # Binary.app/Contents/MacOS.
+ "-Wl,-rpath,@loader_path/../../..",
+ ]
+ }
+}
+
+# The ldflags referenced below are handled by
+# //build/toolchain/mac/linker_driver.py.
+# Remove this config if a target wishes to change the arguments passed to the
+# strip command during linking. This config by default strips all symbols
+# from a binary, but some targets may wish to specify an exports file to
+# preserve specific symbols.
+config("strip_all") {
+ if (enable_stripping) {
+ ldflags = [ "-Wcrl,strip,-x,-S" ]
+ }
+}
diff --git a/dsoftbus/build/config/mac/mac_sdk.gni b/dsoftbus/build/config/mac/mac_sdk.gni
new file mode 100644
index 0000000000000000000000000000000000000000..8990bde36cc0f1f5f115198eb6f1e26303444674
--- /dev/null
+++ b/dsoftbus/build/config/mac/mac_sdk.gni
@@ -0,0 +1,108 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/mac/mac_sdk_overrides.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+ # The MACOSX_DEPLOYMENT_TARGET variable used when compiling. This partially
+ # controls the minimum supported version of macOS for Chromium by
+ # affecting the symbol availability rules. This may differ from
+ # mac_min_system_version when dropping support for older macOSes but where
+ # additional code changes are required to be compliant with the availability
+ # rules.
+ # Must be of the form x.x.x for Info.plist files.
+ mac_deployment_target = "10.13.0"
+
+ # The value of the LSMinimumSystemVersion in Info.plist files. This partially
+ # controls the minimum supported version of macOS for Chromium by
+ # affecting the Info.plist. This may differ from mac_deployment_target when
+ # dropping support for older macOSes. This should be greater than or equal to
+ # the mac_deployment_target version.
+ # Must be of the form x.x.x for Info.plist files.
+ mac_min_system_version = "10.13.0"
+
+ # Path to a specific version of the Mac SDK, not including a slash at the end.
+ # If empty, the path to the lowest version greater than or equal to
+ # mac_sdk_min is used.
+ mac_sdk_path = ""
+
+ # The SDK name as accepted by xcodebuild.
+ mac_sdk_name = "macosx"
+}
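+
+# Example (illustrative; the path is hypothetical): a specific SDK can be
+# pinned from args.gn:
+#
+#   mac_sdk_path = "/opt/sdks/MacOSX10.15.sdk"
+#   mac_min_system_version = "10.13.0"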
+
+# Check that the version of the macOS SDK used is the one requested when
+# building a version of Chrome shipped to users. Disable the check when
+# building for iOS, as the macOS SDK version is not relevant for the tools
+# built for the host (they are not shipped). This is required because Chrome
+# on iOS is usually built with the latest version of Xcode, which may not ship
+# with the version of the macOS SDK used to build Chrome on Mac.
+_verify_sdk = is_official_build && target_os != "ios"
+
+find_sdk_args = [ "--print_sdk_path" ]
+if (!use_system_xcode) {
+ find_sdk_args += [
+ "--developer_dir",
+ hermetic_xcode_path,
+ ]
+}
+if (_verify_sdk) {
+ find_sdk_args += [
+ "--verify",
+ mac_sdk_min,
+ "--sdk_path=" + mac_sdk_path,
+ ]
+} else {
+ find_sdk_args += [ mac_sdk_min ]
+}
+
+# The tool will print the SDK path on the first line, and the version on the
+# second line.
+find_sdk_lines =
+ exec_script("//build/misc/mac/find_sdk.py", find_sdk_args, "list lines")
+
+mac_sdk_version = find_sdk_lines[1]
+if (mac_sdk_path == "") {
+ mac_sdk_path = find_sdk_lines[0]
+}
+
+script_name = "//build/config/mac/sdk_info.py"
+sdk_info_args = []
+if (!use_system_xcode) {
+ sdk_info_args += [
+ "--developer_dir",
+ hermetic_xcode_path,
+ ]
+}
+sdk_info_args += [ mac_sdk_name ]
+
+_mac_sdk_result = exec_script(script_name, sdk_info_args, "scope")
+xcode_version = _mac_sdk_result.xcode_version
+xcode_build = _mac_sdk_result.xcode_build
+machine_os_build = _mac_sdk_result.machine_os_build
+
+if (mac_sdk_version != mac_sdk_min &&
+ exec_script("//build/misc/mac/check_return_value.py",
+ [
+ "test",
+ xcode_version,
+ "-ge",
+ "0730",
+ ],
+ "value") != 1) {
+ print(
+ "********************************************************************************")
+ print(
+ " WARNING: The Mac OS X SDK is incompatible with the version of Xcode. To fix,")
+ print(
+ " either upgrade Xcode to the latest version or install the Mac OS X")
+ print(
+ " $mac_sdk_min SDK. For more information, see https://crbug.com/620127.")
+ print()
+ print(" Current SDK Version: $mac_sdk_version")
+ print(" Current Xcode Version: $xcode_version ($xcode_build)")
+ print(
+ "********************************************************************************")
+ assert(false, "SDK is incompatible with Xcode")
+}
diff --git a/dsoftbus/build/config/mac/mac_sdk_overrides.gni b/dsoftbus/build/config/mac/mac_sdk_overrides.gni
new file mode 100644
index 0000000000000000000000000000000000000000..36326786afcb2879a5d703500227cc6c0786b8ee
--- /dev/null
+++ b/dsoftbus/build/config/mac/mac_sdk_overrides.gni
@@ -0,0 +1,22 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains arguments that subprojects may choose to override. It
+# asserts that those overrides are used, to prevent unused args warnings.
+
+_sdk_min_from_env = getenv("FORCE_MAC_SDK_MIN")
+declare_args() {
+ # Minimum supported version of the Mac SDK.
+ if (_sdk_min_from_env == "") {
+ mac_sdk_min = "10.12"
+ } else {
+ mac_sdk_min = _sdk_min_from_env
+ }
+}
+
+# Always assert that mac_sdk_min is used on non-macOS platforms to prevent
+# unused args warnings.
+if (!is_mac) {
+ assert(mac_sdk_min == "10.12" || true)
+}
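+
+# Example (illustrative): the minimum SDK version above can be overridden from
+# the environment, e.g. FORCE_MAC_SDK_MIN=10.13 gn gen out/Default.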
diff --git a/dsoftbus/build/config/mac/sdk_info.py b/dsoftbus/build/config/mac/sdk_info.py
new file mode 100755
index 0000000000000000000000000000000000000000..36ef75689187d3acf5e877c42e3bbd1f94067f21
--- /dev/null
+++ b/dsoftbus/build/config/mac/sdk_info.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import doctest
+import itertools
+import os
+import subprocess
+import sys
+
+# This script prints information about the build system, the operating
+# system and the iOS or Mac SDK (depending on the platform "iphonesimulator",
+# "iphoneos" or "macosx" generally).
+
+def SplitVersion(version):
+ """Splits the Xcode version to 3 values.
+
+ >>> list(SplitVersion('8.2.1.1'))
+ ['8', '2', '1']
+ >>> list(SplitVersion('9.3'))
+ ['9', '3', '0']
+ >>> list(SplitVersion('10.0'))
+ ['10', '0', '0']
+ """
+ if isinstance(version, bytes):
+ version = version.decode()
+ version = version.split('.')
+ return itertools.islice(itertools.chain(version, itertools.repeat('0')), 0, 3)
+
+def FormatVersion(version):
+ """Converts Xcode version to a format required for DTXcode in Info.plist
+
+ >>> FormatVersion('8.2.1')
+ '0821'
+ >>> FormatVersion('9.3')
+ '0930'
+ >>> FormatVersion('10.0')
+ '1000'
+ """
+ major, minor, patch = SplitVersion(version)
+ return ('%2s%s%s' % (major, minor, patch)).replace(' ', '0')
+
+def FillXcodeVersion(settings):
+ """Fills the Xcode version and build number into |settings|."""
+ lines = subprocess.check_output(['xcodebuild', '-version']).splitlines()
+ settings['xcode_version'] = FormatVersion(lines[0].split()[-1])
+ settings['xcode_version_int'] = int(settings['xcode_version'], 10)
+ settings['xcode_build'] = lines[-1].split()[-1]
+
+
+def FillMachineOSBuild(settings):
+ """Fills OS build number into |settings|."""
+ settings['machine_os_build'] = subprocess.check_output(
+ ['sw_vers', '-buildVersion']).strip()
+
+
+def FillSDKPathAndVersion(settings, platform, xcode_version):
+ """Fills the SDK path and version for |platform| into |settings|."""
+ settings['sdk_path'] = subprocess.check_output([
+ 'xcrun', '-sdk', platform, '--show-sdk-path']).strip()
+ settings['sdk_version'] = subprocess.check_output([
+ 'xcrun', '-sdk', platform, '--show-sdk-version']).strip()
+ settings['sdk_platform_path'] = subprocess.check_output([
+ 'xcrun', '-sdk', platform, '--show-sdk-platform-path']).strip()
+ if xcode_version >= '0720':
+ settings['sdk_build'] = subprocess.check_output([
+ 'xcrun', '-sdk', platform, '--show-sdk-build-version']).strip()
+ else:
+ settings['sdk_build'] = settings['sdk_version']
+
+
+if __name__ == '__main__':
+ doctest.testmod()
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--developer_dir", required=False)
+ args, unknownargs = parser.parse_known_args()
+ if args.developer_dir:
+ os.environ['DEVELOPER_DIR'] = args.developer_dir
+
+ if len(unknownargs) != 1:
+ sys.stderr.write(
+ 'usage: %s [iphoneos|iphonesimulator|macosx]\n' %
+ os.path.basename(sys.argv[0]))
+ sys.exit(1)
+
+ settings = {}
+ FillMachineOSBuild(settings)
+ FillXcodeVersion(settings)
+ FillSDKPathAndVersion(settings, unknownargs[0], settings['xcode_version'])
+
+ for key in sorted(settings):
+ value = settings[key]
+ if isinstance(value, bytes):
+ value = value.decode()
+ value = '"%s"' % value
+ print('%s=%s' % (key, value))
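+
+# Example output (illustrative values only):
+#   machine_os_build="19H2"
+#   sdk_version="10.15"
+#   xcode_build="11C505"
+#   xcode_version="1130"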
diff --git a/dsoftbus/build/config/mac/symbols.gni b/dsoftbus/build/config/mac/symbols.gni
new file mode 100644
index 0000000000000000000000000000000000000000..595478c08117f5995f11bdc80445e469f45826f5
--- /dev/null
+++ b/dsoftbus/build/config/mac/symbols.gni
@@ -0,0 +1,29 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+# This file declares arguments and configs that control whether dSYM debug
+# info is produced and whether build products are stripped.
+
+declare_args() {
+ # Produce dSYM files for targets that are configured to do so. dSYM
+ # generation is controlled globally as it is a linker output (produced via
+ # the //build/toolchain/mac/linker_driver.py. Enabling this will result in
+ # all shared library, loadable module, and executable targets having a dSYM
+ # generated.
+ enable_dsyms = is_official_build || using_sanitizer
+
+ # Strip symbols from linked targets by default. If this is enabled, the
+ # //build/config/mac:strip_all config will be applied to all linked targets.
+ # If custom stripping parameters are required, remove that config from a
+ # linked target and apply custom -Wcrl,strip flags. See
+ # //build/toolchain/mac/linker_driver.py for more information.
+ enable_stripping = is_official_build
+}
+
+# Save unstripped copies of targets with a ".unstripped" suffix. This is
+# useful to preserve the original output when enable_stripping=true but
+# we're not actually generating real dSYMs.
+save_unstripped_output = enable_stripping && !enable_dsyms
diff --git a/dsoftbus/build/config/mac/xcrun.py b/dsoftbus/build/config/mac/xcrun.py
new file mode 100755
index 0000000000000000000000000000000000000000..0e29287bcbb82246fe14d97d303d0a59d55373c6
--- /dev/null
+++ b/dsoftbus/build/config/mac/xcrun.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import subprocess
+import sys
+
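+# Example (illustrative): invoke a tool through xcrun, writing a stamp file on
+# success; any arguments not recognized below are forwarded to xcrun:
+#
+#   xcrun.py --stamp gen/actool.stamp actool --version
+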
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description='A script to execute a command via xcrun.')
+ parser.add_argument('--stamp', action='store', type=str,
+ help='Write a stamp file to this path on success.')
+ parser.add_argument('--developer_dir', required=False,
+ help='Path to Xcode.')
+ args, unknown_args = parser.parse_known_args()
+
+ if args.developer_dir:
+ os.environ['DEVELOPER_DIR'] = args.developer_dir
+
+ rv = subprocess.check_call(['xcrun'] + unknown_args)
+ if rv == 0 and args.stamp:
+ if os.path.exists(args.stamp):
+ os.unlink(args.stamp)
+ with open(args.stamp, 'w+') as fp:
+   fp.write('')  # Create an empty stamp file to record success.
+
+ sys.exit(rv)
diff --git a/dsoftbus/build/config/mingw/BUILD.gn b/dsoftbus/build/config/mingw/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..216319459ad4486c27b88066b9a7c3b119387e07
--- /dev/null
+++ b/dsoftbus/build/config/mingw/BUILD.gn
@@ -0,0 +1,43 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+assert(is_mingw)
+
+config("compiler") {
+ _target = [
+ "-target",
+ "x86_64-pc-windows-gnu",
+ ]
+
+ _mingw_flags = [
+ "-rtlib=compiler-rt",
+ "-stdlib=libc++",
+ "-lunwind",
+ "-lpthread",
+ "-Qunused-arguments",
+ ]
+
+ cflags = _target
+ asmflags = _target
+ ldflags = _target
+
+ cflags += _mingw_flags
+ asmflags += _mingw_flags
+ ldflags += _mingw_flags
+
+ cflags += [ "-fuse-ld=lld" ]
+
+ ldflags += [ "-fuse-ld=lld" ]
+
+ cflags += [ "-D__CUSTOM_SECURITY_LIBRARY" ]
+}
diff --git a/dsoftbus/build/config/ohos/BUILD.gn b/dsoftbus/build/config/ohos/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..aa5ac7cc6629ff4d50eb8ce5d6490c3956803733
--- /dev/null
+++ b/dsoftbus/build/config/ohos/BUILD.gn
@@ -0,0 +1,120 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/compiler/compiler.gni")
+import("//build/config/ohos/config.gni")
+
+import("//build/config/c++/c++.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+
+assert(is_ohos)
+
+# This is included by reference in the //build/config/compiler config that
+# is applied to all targets. It is here to separate out the logic that is
+# ohos-only.
+config("compiler") {
+ cflags = [
+ "-ffunction-sections",
+ "-fno-short-enums",
+ ]
+ defines = [
+ # The NDK has these things, but doesn't define the constants to say that it
+ # does. Define them here instead.
+ "HAVE_SYS_UIO_H",
+ ]
+
+ defines += [
+ "__MUSL__",
+ "_LIBCPP_HAS_MUSL_LIBC",
+ "__BUILD_LINUX_WITH_CLANG",
+ ]
+
+ ldflags = [
+ "-Wl,--no-undefined",
+ "-Wl,--exclude-libs=libunwind_llvm.a",
+ "-Wl,--exclude-libs=libc++_static.a",
+
+ # Don't allow visible symbols from libraries that contain
+ # assembly code with symbols that aren't hidden properly.
+ # http://crbug.com/448386
+ "-Wl,--exclude-libs=libvpx_assembly_arm.a",
+ ]
+
+ cflags += [ "--target=$abi_target" ]
+ include_dirs = [
+ "${musl_sysroot}/usr/include/${abi_target}",
+ "${clang_base_path}/include/c++/v1",
+ ]
+
+ ldflags += [ "--target=$abi_target" ]
+
+ # Assign any flags set for the C compiler to asmflags so that they are sent
+ # to the assembler.
+ asmflags = cflags
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is ohos-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+ cflags_cc = []
+
+ defines = [
+ "__GNU_SOURCE=1", # Necessary for clone().
+ "CHROMIUM_CXX_TWEAK_INLINES", # Saves binary size.
+ ]
+
+ defines += [
+ "__MUSL__",
+ "_LIBCPP_HAS_MUSL_LIBC",
+ "__BUILD_LINUX_WITH_CLANG",
+ ]
+ ldflags = [ "-nostdlib" ]
+
+ libs = []
+
+ # arm builds of libc++ starting in NDK r12 depend on unwind.
+ if (current_cpu == "arm") {
+ libs += [ "unwind" ]
+ }
+
+ ldflags += [
+ "-L" +
+ rebase_path("${clang_base_path}/lib/${abi_target}/c++", root_build_dir),
+ "-L" + rebase_path("${musl_sysroot}/usr/lib/${abi_target}", root_build_dir),
+ "-L" + rebase_path("${clang_base_path}/lib/clang/10.0.1/lib/${abi_target}",
+ root_build_dir),
+ ]
+ ldflags += [ "-Wl,--dynamic-linker,/system/bin/ld-musl-${musl_arch}.so.1" ]
+
+ libs += [
+ rebase_path(libclang_rt_file),
+ "c",
+ "c++",
+ "c++abi",
+ ]
+
+ if (current_cpu == "arm" && arm_version == 6) {
+ libs += [ "atomic" ]
+ }
+
+ ldflags += [ "-Wl,--warn-shared-textrel" ]
+}
+
+config("executable_config") {
+ cflags = [ "-fPIE" ]
+ asmflags = [ "-fPIE" ]
+ ldflags = [ "-pie" ]
+}
+
+# Used for instrumented build to generate the orderfile.
+config("default_orderfile_instrumentation") {
+ if (use_order_profiling) {
+ cflags = [ "-finstrument-function-entry-bare" ]
+ if (use_thin_lto) {
+ ldflags = [ "-Wl,-u,__cyg_profile_func_enter_bare" ]
+ }
+ }
+}
diff --git a/dsoftbus/build/config/ohos/abi.gni b/dsoftbus/build/config/ohos/abi.gni
new file mode 100755
index 0000000000000000000000000000000000000000..57d1dcaae43278cca8fb0ad45fe2891d1ce906f0
--- /dev/null
+++ b/dsoftbus/build/config/ohos/abi.gni
@@ -0,0 +1,68 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Logic separated out from config.gni so that it can be used by compiler.gni
+# without introducing a circular dependency.
+
+assert(is_ohos)
+
+declare_args() {
+ # Adds instrumentation to each function. Writes a file with the order in
+ # which functions are called at startup.
+ use_order_profiling = false
+
+ # Only effective if use_order_profiling = true. When this is true,
+ # instrumentation switches from startup profiling after a delay, and
+ # then waits for a devtools memory dump request to dump all
+ # profiling information. When false, the same delay is used to switch from
+ # startup, and then after a second delay all profiling information is dumped.
+ devtools_instrumentation_dumping = false
+
+ # Builds the secondary ABI for apps; supports building a 32-bit arch as the
+ # secondary ABI in 64-bit Monochrome and WebView.
+ build_app_secondary_abi = true
+}
+
+assert(!devtools_instrumentation_dumping || use_order_profiling,
+ "devtools_instrumentation_dumping requires use_order_profiling")
+
+if (current_cpu == "x86") {
+ ohos_app_abi = "x86"
+} else if (current_cpu == "arm") {
+ import("//build/config/arm.gni")
+ if (arm_version < 7) {
+ ohos_app_abi = "armeabi"
+ } else {
+ ohos_app_abi = "armeabi-v7a"
+ }
+} else if (current_cpu == "x86_64") {
+ ohos_app_abi = "x86_64"
+} else if (current_cpu == "arm64") {
+ ohos_app_abi = "arm64-v8a"
+} else {
+ assert(false, "Unknown ABI: " + current_cpu)
+}
+
+if (target_cpu == "arm64" || target_cpu == "x86_64") {
+ ohos_64bit_target_cpu = true
+} else if (target_cpu == "arm" || target_cpu == "x86") {
+ ohos_64bit_target_cpu = false
+} else {
+ assert(false, "Unknown target CPU: $target_cpu")
+}
+
+# Intentionally do not define ohos_secondary_abi_cpu and
+# ohos_app_secondary_abi for 32-bit target_cpu, since they are not used.
+if (target_cpu == "arm64") {
+ ohos_secondary_abi_cpu = "arm"
+ ohos_app_secondary_abi = "armeabi-v7a"
+} else if (target_cpu == "x86_64") {
+ ohos_secondary_abi_cpu = "x86"
+ ohos_app_secondary_abi = "x86"
+}
+
+if (defined(ohos_secondary_abi_cpu)) {
+ ohos_secondary_abi_toolchain =
+ "//build/toolchain/ohos:ohos_clang_${ohos_secondary_abi_cpu}"
+}
diff --git a/dsoftbus/build/config/ohos/config.gni b/dsoftbus/build/config/ohos/config.gni
new file mode 100644
index 0000000000000000000000000000000000000000..072bce1da9ffd954c4ec320b98f86abb39831899
--- /dev/null
+++ b/dsoftbus/build/config/ohos/config.gni
@@ -0,0 +1,41 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (is_ohos) {
+ import("//build/config/clang/clang.gni")
+ import("//build/misc/overrides/build.gni")
+ import("abi.gni")
+
+ import("//build/config/ohos/musl.gni")
+
+ default_base_libs_root = ""
+
+ declare_args() {
+ base_libs_root = default_base_libs_root
+ }
+
+ # Defines the name the ohos build gives to the current host CPU
+ # architecture, which differs from the names GN uses.
+ if (host_os == "linux") {
+ ohos_host_os = "linux"
+ } else if (host_os == "mac") {
+ ohos_host_os = "darwin"
+ } else {
+ assert(false, "Need toolchain support for your build OS.")
+ }
+
+ if (current_cpu == "arm") {
+ abi_target = "arm-linux-ohosmusl"
+ } else if (current_cpu == "x86") {
+ abi_target = ""
+ } else if (current_cpu == "arm64") {
+ abi_target = "aarch64-linux-ohosmusl"
+ } else if (current_cpu == "x86_64") {
+ abi_target = ""
+ } else {
+ assert(false, "Architecture not supported")
+ }
+
+ libclang_rt_file = "${clang_base_path}/lib/clang/10.0.1/lib/${abi_target}/libclang_rt.builtins.a"
+}
diff --git a/dsoftbus/build/config/ohos/copy_ex.gni b/dsoftbus/build/config/ohos/copy_ex.gni
new file mode 100755
index 0000000000000000000000000000000000000000..306c452cb8047bd3f07b6cedd7df341258d60870
--- /dev/null
+++ b/dsoftbus/build/config/ohos/copy_ex.gni
@@ -0,0 +1,67 @@
+# Copyright 2019 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/python.gni")
+
+# Copy a list of files into a destination directory, potentially renaming
+# files as they are copied. This also ensures that symlinks are followed
+# during the copy (i.e. the symlinks are never copied, only their contents).
+#
+# Variables:
+# dest: Destination directory path.
+# sources: List of source files or directories to copy to dest.
+# renaming_sources: Optional list of source file paths that will be renamed
+# during the copy operation. If provided, renaming_destinations is required.
+# renaming_destinations: Optional list of destination file paths, required
+# when renaming_sources is provided. Both lists should have the same size
+# and matching entries.
+# args: Optional. Additional arguments to the copy_ex.py script.
+#
+# The following variables have the usual GN meaning: data, deps, inputs,
+# outputs, testonly, visibility.
+#
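+# Usage sketch (hypothetical target and file names):
+#
+#   copy_ex("copy_assets") {
+#     dest = "$root_build_dir/assets"
+#     sources = [ "data/logo.png" ]
+#     renaming_sources = [ "data/config.dev.json" ]
+#     renaming_destinations = [ "config.json" ]
+#     outputs = [
+#       "$root_build_dir/assets/logo.png",
+#       "$root_build_dir/assets/config.json",
+#     ]
+#   }
+#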
+template("copy_ex") {
+ set_sources_assignment_filter([])
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data",
+ "deps",
+ "outputs",
+ "testonly",
+ "visibility",
+ ])
+ sources = []
+ if (defined(invoker.sources)) {
+ sources += invoker.sources
+ }
+ if (defined(invoker.inputs)) {
+ inputs = invoker.inputs
+ }
+
+ script = "//build/scripts/copy_ex.py"
+
+ args = [
+ "--dest",
+ rebase_path(invoker.dest, root_build_dir),
+ ]
+ rebased_sources = rebase_path(sources, root_build_dir)
+ args += [ "--files=$rebased_sources" ]
+
+ if (defined(invoker.args)) {
+ args += invoker.args
+ }
+
+ if (defined(invoker.renaming_sources) &&
+ defined(invoker.renaming_destinations)) {
+ sources += invoker.renaming_sources
+ rebased_renaming_sources =
+ rebase_path(invoker.renaming_sources, root_build_dir)
+ args += [ "--renaming-sources=$rebased_renaming_sources" ]
+
+ renaming_destinations = invoker.renaming_destinations
+ args += [ "--renaming-destinations=$renaming_destinations" ]
+ }
+ }
+}
diff --git a/dsoftbus/build/config/ohos/musl.gni b/dsoftbus/build/config/ohos/musl.gni
new file mode 100644
index 0000000000000000000000000000000000000000..2468ca8d5cfe59dfab803a0f9c43c6b83582b571
--- /dev/null
+++ b/dsoftbus/build/config/ohos/musl.gni
@@ -0,0 +1,19 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+if (use_musl) {
+ musl_target_abi_name = "soft"
+ musl_target = "//third_party/musl:musl_libs"
+ musl_sysroot = get_label_info(musl_target, "target_out_dir")
+ import("//third_party/musl/musl_config.gni")
+}
\ No newline at end of file
diff --git a/dsoftbus/build/config/posix/BUILD.gn b/dsoftbus/build/config/posix/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..68ce5023224f5a9a5f38c0723c5b75c54fbe62cf
--- /dev/null
+++ b/dsoftbus/build/config/posix/BUILD.gn
@@ -0,0 +1,134 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/c++/c++.gni")
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/deps_revisions.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/sysroot.gni")
+import("//build/toolchain/toolchain.gni")
+
+assert(is_posix)
+
+group("posix") {
+ visibility = [ "//:optimize_gn_gen" ]
+}
+
+# This is included by reference in the //build/config/compiler:runtime_library
+# config that is applied to all targets. It is here to separate out the logic
+# that is Posix-only. Please see that target for advice on what should go in
+# :runtime_library vs. :compiler.
+config("runtime_library") {
+ asmflags = []
+ cflags = []
+ cflags_c = []
+ cflags_cc = []
+ cflags_objc = []
+ cflags_objcc = []
+ defines = []
+ ldflags = []
+ lib_dirs = []
+ libs = []
+
+ if (use_custom_libcxx) {
+ if (!is_component_build) {
+ # Don't leak any symbols on a static build.
+ defines += [ "_LIBCPP_DISABLE_VISIBILITY_ANNOTATIONS" ]
+ if (!export_libcxxabi_from_executables) {
+ defines += [ "_LIBCXXABI_DISABLE_VISIBILITY_ANNOTATIONS" ]
+ }
+ }
+ cflags_cc += [
+ "-nostdinc++",
+ "-isystem" + rebase_path("$libcxx_prefix/include", root_build_dir),
+ "-isystem" + rebase_path("$libcxxabi_prefix/include", root_build_dir),
+ ]
+ defines += [
+ "CR_LIBCXX_REVISION=$libcxx_svn_revision",
+ "CR_LIBCXXABI_REVISION=$libcxxabi_svn_revision",
+ "_LIBCPP_ENABLE_NODISCARD",
+ ]
+
+ # Make sure we don't link against libc++ or libstdc++.
+ if (is_clang) {
+ # //build/config/ohos:runtime_library adds -nostdlib, which suppresses
+ # linking against all system libraries. -nostdlib++ would be redundant,
+ # and would generate an unused warning in this case.
+ if (!is_ohos) {
+ ldflags += [ "-nostdlib++" ]
+ }
+ } else {
+ # Gcc has a built-in abs() definition with default visibility.
+ # If it was not disabled, it would conflict with libc++'s abs()
+ # with hidden visibility.
+ cflags += [ "-fno-builtin-abs" ]
+
+ ldflags += [ "-nodefaultlibs" ]
+
+ # Unfortunately, there's no way to disable linking against just libc++
+ # (gcc doesn't have -nostdlib++:
+ # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=83931); -nodefaultlibs
+ # removes all of the default libraries, so add back the ones that we need.
+ libs += [
+ "c",
+ "gcc_s",
+ "m",
+ "rt",
+ ]
+ }
+ }
+
+ if (!is_mac && !is_ios && sysroot != "") {
+ # Pass the sysroot to all C compiler variants, the assembler, and linker.
+ sysroot_flags = [ "--sysroot=" + rebase_path(sysroot, root_build_dir) ]
+ if (is_linux) {
+ # This is here so that all files get recompiled after a sysroot roll and
+ # when turning the sysroot on or off. (Defines are passed via the command
+ # line, and the build system rebuilds things when their command line
+ # changes.) Nothing should ever read this define.
+ sysroot_hash =
+ exec_script("//build/linux/sysroot_scripts/install-sysroot.py",
+ [ "--print-hash=$current_cpu" ],
+ "trim string",
+ [ "//build/linux/sysroot_scripts/sysroots.json" ])
+ defines += [ "CR_SYSROOT_HASH=$sysroot_hash" ]
+ }
+ asmflags += sysroot_flags
+
+ link_sysroot_flags =
+ [ "--sysroot=" + rebase_path(link_sysroot, root_build_dir) ]
+ ldflags += link_sysroot_flags
+
+ # When use_custom_libcxx=true, some -isystem flags get passed to
+ # cflags_cc to set up libc++ include paths. We want to make sure
+ # the sysroot includes take lower precedence than the libc++
+ # ones, so they must appear later in the command line. However,
+ # the gn reference states "These variant-specific versions of
+ # cflags* will be appended on the compiler command line after
+ # 'cflags'." Because of this, we must set the sysroot flags for
+ # all cflags variants instead of using 'cflags' directly.
+ cflags_c += sysroot_flags
+ cflags_cc += sysroot_flags
+ cflags_objc += sysroot_flags
+ cflags_objcc += sysroot_flags
+
+ # Need to get some linker flags out of the sysroot.
+ ld_paths =
+ exec_script("sysroot_ld_path.py",
+ [
+ rebase_path("//build/misc/linux/sysroot_ld_path.sh",
+ root_build_dir),
+ rebase_path(link_sysroot),
+ ],
+ "list lines")
+ foreach(ld_path, ld_paths) {
+ ld_path = rebase_path(ld_path, root_build_dir)
+ ldflags += [
+ "-L" + ld_path,
+ "-Wl,-rpath-link=" + ld_path,
+ ]
+ }
+ }
+}
diff --git a/dsoftbus/build/config/posix/sysroot_ld_path.py b/dsoftbus/build/config/posix/sysroot_ld_path.py
new file mode 100755
index 0000000000000000000000000000000000000000..aa6faa770e5d02255c8071079419a7e97967d92d
--- /dev/null
+++ b/dsoftbus/build/config/posix/sysroot_ld_path.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file takes two arguments, the relative location of the shell script that
+# does the checking, and the name of the sysroot.
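+#
+# Usage sketch (hypothetical sysroot path, mirroring the exec_script call in
+# //build/config/posix/BUILD.gn):
+#   sysroot_ld_path.py build/misc/linux/sysroot_ld_path.sh /path/to/sysroot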
+
+import subprocess
+import sys
+
+if len(sys.argv) != 3:
+  print("Need two arguments")
+  sys.exit(1)
+
+result = subprocess.check_output([sys.argv[1], sys.argv[2]]).strip()
+result = result.decode().replace(" ", "\n")
+if result != "":
+  print(result)
diff --git a/dsoftbus/build/config/pycache/pycache.config b/dsoftbus/build/config/pycache/pycache.config
new file mode 100644
index 0000000000000000000000000000000000000000..3ed753f63f7c968e8298f20be66266e66e92ee79
--- /dev/null
+++ b/dsoftbus/build/config/pycache/pycache.config
@@ -0,0 +1,15 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+PYCACHE_DIR=
diff --git a/dsoftbus/build/config/python.gni b/dsoftbus/build/config/python.gni
new file mode 100755
index 0000000000000000000000000000000000000000..79ddcb5640d427f4376d68f1272380985269c319
--- /dev/null
+++ b/dsoftbus/build/config/python.gni
@@ -0,0 +1,166 @@
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Creates a group() that lists Python sources as |data|.
+# Having such targets serves two purposes:
+# 1) Causes files to be included in runtime_deps, so that they are uploaded to
+# swarming when running tests remotely.
+# 2) Causes "gn analyze" to know about all Python inputs so that tests will be
+# re-run when relevant Python files change.
+#
+# All non-trivial Python scripts should use a "pydeps" file to track their
+# sources. To create a .pydeps file for a target in //example:
+#
+# build/print_python_deps.py \
+# --root example \
+# --output example/$target_name.pydeps \
+# path/to/your/script.py
+#
+# Keep the .pydeps file up-to-date by adding to //PRESUBMIT.py under one of:
+# _GENERIC_PYDEPS_FILES
+#
+# Variables
+# pydeps_file: Path to .pydeps file to read sources from (optional).
+# data: Additional files to include in data. E.g. non-.py files needed by the
+# library, or .py files that are conditionally / lazily imported.
+#
+# Example
+# python_library("my_library_py") {
+# pydeps_file = "my_library.pydeps"
+# data = [ "foo.dat" ]
+# }
+template("python_library") {
+ group(target_name) {
+ forward_variables_from(invoker,
+ [
+ "data_deps",
+ "deps",
+ "testonly",
+ "visibility",
+ ])
+
+ if (defined(invoker.pydeps_file)) {
+ _py_files = read_file(invoker.pydeps_file, "list lines")
+
+ # Filter out comments.
+ set_sources_assignment_filter([ "#*" ])
+ sources = _py_files
+
+ # Even though the .pydep file is not used at runtime, it must be added
+ # so that "gn analyze" will mark the target as changed when .py files
+ # are removed but none are added or modified.
+ data = sources + [ invoker.pydeps_file ]
+ } else {
+ data = []
+ }
+ if (defined(invoker.data)) {
+ data += invoker.data
+ }
+ }
+}
+
+# A template used for actions that execute a Python script, which has an
+# associated .pydeps file. In other words:
+#
+# - This is very similar to just an action(), except that |script| must point
+# to a Python script (e.g. "//build/.../foo.py") that has a corresponding
+# .pydeps file in the source tree (e.g. "//build/.../foo.pydeps").
+#
+# - The .pydeps file contains a list of python dependencies (imports really)
+# and is generated _manually_ by using a command like:
+#
+# build/print_python_deps.py --inplace build/gyp/foo.py
+#
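+# Usage sketch (hypothetical target; reuses the foo.py/foo.pydeps pair from
+# the comment above):
+#
+#   action_with_pydeps("generate_stuff") {
+#     script = "//build/.../foo.py"
+#     outputs = [ "$target_gen_dir/stuff.txt" ]
+#     args = [ rebase_path(outputs[0], root_build_dir) ]
+#   }
+#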
+template("action_with_pydeps") {
+ # Read the .pydeps file now. Note that this is done every time this
+ # template is called, but benchmarking doesn't show any impact on overall
+ # 'gn gen' speed anyway.
+ _pydeps_file = invoker.script + "deps"
+ _pydeps_raw = read_file(_pydeps_file, "list lines")
+
+ # Filter out comments.
+ # This is a bit convoluted to preserve the value of sources if defined.
+ _old_sources = []
+ if (defined(sources)) {
+ _old_sources = sources
+ }
+ set_sources_assignment_filter([ "#*" ])
+ sources = _pydeps_raw
+ _pydeps = sources
+ set_sources_assignment_filter([])
+ sources = _old_sources
+
+ action(target_name) {
+ # Forward all variables. Ensure that testonly and visibility are forwarded
+ # explicitly, since this performs recursive scope lookups, which is
+ # required to ensure their definition from scopes above the caller are
+ # properly handled. All other variables are forwarded with "*", which
+ # doesn't perform recursive lookups at all. See https://crbug.com/862232
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ ])
+ forward_variables_from(invoker,
+ "*",
+ [
+ "testonly",
+ "visibility",
+ ])
+
+ if (!defined(inputs)) {
+ inputs = []
+ }
+
+ # Dependencies are listed relative to the script directory, but inputs
+ # expects paths that are relative to the current BUILD.gn
+ _script_dir = get_path_info(script, "dir")
+ inputs += rebase_path(_pydeps, ".", _script_dir)
+ }
+}
+
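+# Like action_with_pydeps, but wraps action_foreach(), so |script| runs once
+# per file in |sources|.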
+template("action_foreach_with_pydeps") {
+ _pydeps_file = invoker.script + "deps"
+ _pydeps_raw = read_file(_pydeps_file, "list lines")
+
+ # Filter out comments.
+ # This is a bit convoluted to preserve the value of sources if defined.
+ _old_sources = []
+ if (defined(sources)) {
+ _old_sources = sources
+ }
+ set_sources_assignment_filter([ "#*" ])
+ sources = _pydeps_raw
+ _pydeps = sources
+ set_sources_assignment_filter([])
+ sources = _old_sources
+
+ action_foreach(target_name) {
+ # Forward all variables. Ensure that testonly and visibility are forwarded
+ # explicitly, since this performs recursive scope lookups, which is
+ # required to ensure their definition from scopes above the caller are
+ # properly handled. All other variables are forwarded with "*", which
+ # doesn't perform recursive lookups at all. See https://crbug.com/862232
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ ])
+ forward_variables_from(invoker,
+ "*",
+ [
+ "testonly",
+ "visibility",
+ ])
+
+ if (!defined(inputs)) {
+ inputs = []
+ }
+
+ # Dependencies are listed relative to the script directory, but inputs
+ # expects paths that are relative to the current BUILD.gn
+ _script_dir = get_path_info(script, "dir")
+ inputs += rebase_path(_pydeps, ".", _script_dir)
+ }
+}
diff --git a/dsoftbus/build/config/sanitizers/BUILD.gn b/dsoftbus/build/config/sanitizers/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..382003eed2e59aed8ec67cf4cb25e1fe122c8176
--- /dev/null
+++ b/dsoftbus/build/config/sanitizers/BUILD.gn
@@ -0,0 +1,757 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/misc/overrides/build.gni")
+import("//build/ohos_var.gni")
+import("//build/toolchain/toolchain.gni")
+
+if (is_ios) {
+ import("//build/config/ios/ios_sdk.gni")
+}
+
+# Contains the dependencies needed for sanitizers to link into executables and
+# shared_libraries.
+group("deps") {
+ if (using_sanitizer && !is_mingw) {
+ public_configs = [
+ ":sanitizer_options_link_helper",
+
+ # Even when a target removes default_sanitizer_flags, it may be depending
+ # on a library that did not remove default_sanitizer_flags. Thus, we need
+ # to add the ldflags here as well as in default_sanitizer_flags.
+ ":default_sanitizer_ldflags",
+ ]
+ if (use_musl) {
+ public_configs -= [ ":sanitizer_options_link_helper" ]
+ public_configs -= [ ":default_sanitizer_ldflags" ]
+ }
+ deps = [ ":options_sources" ]
+ if (is_win) {
+ exe = ".exe"
+ } else {
+ exe = ""
+ }
+ data = [
+ "//tools/valgrind/asan/",
+ "$clang_base_path/bin/llvm-symbolizer${exe}",
+ ]
+ if (use_prebuilt_instrumented_libraries ||
+ use_locally_built_instrumented_libraries) {
+ deps += [ "//third_party/instrumented_libraries:deps" ]
+ }
+
+ # ASAN is supported on iOS but the runtime library depends on the compiler
+ # used (Chromium version of clang versus Xcode version of clang). Only copy
+ # the ASAN runtime on iOS if building with Chromium clang.
+ if (is_win || is_mac || (is_ios && !use_xcode_clang)) {
+ data_deps = [ ":copy_asan_runtime" ]
+ }
+ if (is_mac || (is_ios && !use_xcode_clang)) {
+ public_deps = [ ":asan_runtime_bundle_data" ]
+ }
+ }
+}
+
+if ((is_mac || is_win || (is_ios && !use_xcode_clang) || is_ohos) &&
+ using_sanitizer) {
+ if (is_mac) {
+ _clang_rt_dso_path = "darwin/libclang_rt.asan_osx_dynamic.dylib"
+ } else if (is_ios) {
+ _clang_rt_dso_path = "darwin/libclang_rt.asan_iossim_dynamic.dylib"
+ } else if (is_win && target_cpu == "x86") {
+ _clang_rt_dso_path = "windows/clang_rt.asan_dynamic-i386.dll"
+ } else if (is_win && target_cpu == "x64") {
+ _clang_rt_dso_path = "windows/clang_rt.asan_dynamic-x86_64.dll"
+ } else if (is_ohos) {
+ if (target_cpu == "arm64") {
+ _clang_rt_dso_path = "aarch64-linux-ohosmusl/libclang_rt.asan.so"
+ } else if (target_cpu == "arm") {
+ _clang_rt_dso_path = "arm-linux-ohosmusl/libclang_rt.asan.so"
+ } else if (target_cpu == "x86_64") {
+ _clang_rt_dso_path = ""
+ }
+ }
+
+ _clang_rt_dso_full_path =
+ "$clang_base_path/lib/clang/$clang_version/lib/$_clang_rt_dso_path"
+
+ if (!is_ios) {
+ copy("copy_asan_runtime") {
+ set_sources_assignment_filter([])
+ sources = [ _clang_rt_dso_full_path ]
+ outputs = [ "$root_out_dir/{{source_file_part}}" ]
+ }
+ } else {
+ # On iOS, the runtime library needs to be code signed (ad hoc signature)
+ # starting with Xcode 8, so use an action instead of a copy on iOS.
+ action("copy_asan_runtime") {
+ script = "//build/config/ios/codesign.py"
+ sources = [ _clang_rt_dso_full_path ]
+ outputs = [ "$root_out_dir/" + get_path_info(sources[0], "file") ]
+ args = [
+ "code-sign-file",
+ "--identity=" + ios_code_signing_identity,
+ "--output=" + rebase_path(outputs[0], root_build_dir),
+ rebase_path(sources[0], root_build_dir),
+ ]
+ }
+ }
+
+ if (is_mac || is_ios) {
+ bundle_data("asan_runtime_bundle_data") {
+ sources = get_target_outputs(":copy_asan_runtime")
+ outputs = [ "{{bundle_executable_dir}}/{{source_file_part}}" ]
+ public_deps = [ ":copy_asan_runtime" ]
+ }
+ }
+}
+
+config("sanitizer_options_link_helper") {
+ if (is_mac || is_ios) {
+ ldflags = [ "-Wl,-U,_sanitizer_options_link_helper" ]
+ } else if (!is_win && !is_mingw) {
+ ldflags = [ "-Wl,-u_sanitizer_options_link_helper" ]
+ }
+}
+
+static_library("options_sources") {
+ # This is a static_library instead of a source_set, as it shouldn't be
+ # unconditionally linked into targets.
+ visibility = [
+ ":deps",
+ "//:gn_visibility",
+ ]
+ sources = [ "//build/misc/sanitizers/sanitizer_options.cc" ]
+
+ # Don't compile this target with any sanitizer code. It can be called from
+ # the sanitizer runtimes, so instrumenting these functions could cause
+ # recursive calls into the runtime if there is an error.
+ configs -= [ "//build/config/sanitizers:default_sanitizer_flags" ]
+
+ if (is_asan) {
+ if (!defined(asan_suppressions_file)) {
+ asan_suppressions_file = "//build/misc/sanitizers/asan_suppressions.cc"
+ }
+ sources += [ asan_suppressions_file ]
+ }
+
+ if (is_lsan) {
+ if (!defined(lsan_suppressions_file)) {
+ lsan_suppressions_file = "//build/misc/sanitizers/lsan_suppressions.cc"
+ }
+ sources += [ lsan_suppressions_file ]
+ }
+
+ if (is_tsan) {
+ if (!defined(tsan_suppressions_file)) {
+ tsan_suppressions_file = "//build/misc/sanitizers/tsan_suppressions.cc"
+ }
+ sources += [ tsan_suppressions_file ]
+ }
+}
+
+# Applies linker flags necessary when either :deps or :default_sanitizer_flags
+# are used.
+config("default_sanitizer_ldflags") {
+ visibility = [
+ ":default_sanitizer_flags",
+ ":deps",
+ ]
+
+ if (is_posix && !is_mingw) {
+ ldflags = []
+ if (is_asan) {
+ if (!use_musl) {
+ ldflags += [ "-fsanitize=address" ]
+ }
+ if (is_mac) {
+ # https://crbug.com/708707
+ ldflags += [ "-fno-sanitize-address-use-after-scope" ]
+ } else {
+ ldflags += [ "-fsanitize-address-use-after-scope" ]
+ }
+ }
+ if (is_lsan) {
+ ldflags += [ "-fsanitize=leak" ]
+ }
+ if (is_tsan) {
+ ldflags += [ "-fsanitize=thread" ]
+ }
+ if (is_msan) {
+ ldflags += [ "-fsanitize=memory" ]
+ }
+ if (is_ubsan || is_ubsan_security) {
+ ldflags += [ "-fsanitize=undefined" ]
+ }
+ if (is_ubsan_null) {
+ ldflags += [ "-fsanitize=null" ]
+ }
+ if (is_ubsan_vptr) {
+ ldflags += [ "-fsanitize=vptr" ]
+ }
+ if (is_safestack) {
+ ldflags += [ "-fsanitize=safe-stack" ]
+ }
+
+ if (use_sanitizer_coverage) {
+ if (use_libfuzzer && !is_mac) {
+ #ldflags += [ "-fsanitize=fuzzer-no-link" ]
+ ldflags += [ "-fsanitize-coverage=$sanitizer_coverage_flags" ]
+ } else {
+ ldflags += [ "-fsanitize-coverage=$sanitizer_coverage_flags" ]
+ }
+ }
+
+ if (is_cfi && current_toolchain == default_toolchain) {
+ ldflags += [ "-fsanitize=cfi-vcall" ]
+ if (use_cfi_cast) {
+ ldflags += [
+ "-fsanitize=cfi-derived-cast",
+ "-fsanitize=cfi-unrelated-cast",
+ ]
+ }
+ if (use_cfi_icall) {
+ ldflags += [ "-fsanitize=cfi-icall" ]
+ }
+ if (use_cfi_diag) {
+ ldflags += [ "-fno-sanitize-trap=cfi" ]
+ if (use_cfi_recover) {
+ ldflags += [ "-fsanitize-recover=cfi" ]
+ }
+ }
+ }
+ } else if (is_win) {
+ # Windows directly calls link.exe instead of the compiler driver when
+ # linking. Hence, pass the runtime libraries instead of -fsanitize=address
+ # or -fsanitize=fuzzer.
+ if (is_asan && is_component_build) {
+ # In the static-library build, ASan libraries are different for
+ # executables and dlls, see link_executable and link_shared_library below.
+ # This here handles only the component build.
+ if (target_cpu == "x64") {
+ # Windows 64-bit.
+ libs = [
+ "clang_rt.asan_dynamic-x86_64.lib",
+ "clang_rt.asan_dynamic_runtime_thunk-x86_64.lib",
+ ]
+ } else {
+ assert(target_cpu == "x86", "WinASan unsupported architecture")
+ libs = [
+ "clang_rt.asan_dynamic-i386.lib",
+ "clang_rt.asan_dynamic_runtime_thunk-i386.lib",
+ ]
+ }
+ }
+ if (use_libfuzzer) {
+ assert(target_cpu == "x64", "LibFuzzer unsupported architecture")
+ assert(!is_component_build,
+ "LibFuzzer only supports non-component builds on Windows")
+
+ # Incremental linking causes padding that messes up SanitizerCoverage.
+ # Don't do it.
+ ldflags = [ "/INCREMENTAL:NO" ]
+ libs = [ "clang_rt.fuzzer_no_main-x86_64.lib" ]
+ }
+ }
+}
+
+config("common_sanitizer_flags") {
+ cflags = []
+
+ if (using_sanitizer && !is_mingw) {
+ assert(is_clang, "sanitizers only supported with clang")
+ assert(!is_official_build, "sanitizers not supported in official builds")
+
+ cflags += [
+ # Column info in debug data confuses Visual Studio's debugger, so don't
+ # use this by default. However, clusterfuzz needs it for good
+ # attribution of reports to CLs, so turn it on there.
+ "-gcolumn-info",
+ ]
+
+ # Frame pointers are controlled in //build/config/compiler:default_stack_frames
+ }
+}
+
+config("asan_flags") {
+ cflags = []
+ if (is_asan && !is_mingw) {
+ cflags += [ "-fsanitize=address" ]
+ if (!is_mac) {
+ cflags += [ "-fsanitize-address-use-after-scope" ]
+ } else {
+ # https://crbug.com/708707
+ cflags += [ "-fno-sanitize-address-use-after-scope" ]
+ }
+ if (!asan_globals) {
+ cflags += [
+ "-mllvm",
+ "-asan-globals=0",
+ ]
+ }
+ }
+}
+
+config("link_executable") {
+ if (is_asan && is_win && !is_component_build) {
+ if (target_cpu == "x64") {
+ ldflags = [ "-wholearchive:clang_rt.asan-x86_64.lib" ]
+ } else {
+ assert(target_cpu == "x86", "WinASan unsupported architecture")
+ ldflags = [ "-wholearchive:clang_rt.asan-i386.lib" ]
+ }
+ } else if (is_asan && is_ohos) {
+ libs = [ "$_clang_rt_dso_full_path" ]
+ }
+}
+
+config("link_shared_library") {
+ if (is_asan && is_win && !is_component_build) {
+ if (target_cpu == "x64") {
+ libs = [ "clang_rt.asan_dll_thunk-x86_64.lib" ]
+ } else {
+ assert(target_cpu == "x86", "WinASan unsupported architecture")
+ libs = [ "clang_rt.asan_dll_thunk-i386.lib" ]
+ }
+ } else if (is_asan && is_ohos) {
+ libs = [ "$_clang_rt_dso_full_path" ]
+ }
+}
+
+config("cfi_flags") {
+ cflags = []
+ if (is_cfi && current_toolchain == default_toolchain) {
+ if (!defined(cfi_blocklist_path)) {
+ cfi_blocklist_path =
+ rebase_path("//tools/cfi/blocklist.txt", root_build_dir)
+ }
+ cflags += [
+ "-fsanitize=cfi-vcall",
+ "-fsanitize-blacklist=$cfi_blocklist_path",
+ ]
+
+ if (use_cfi_cast) {
+ cflags += [
+ "-fsanitize=cfi-derived-cast",
+ "-fsanitize=cfi-unrelated-cast",
+ ]
+ }
+
+ if (use_cfi_icall) {
+ cflags += [ "-fsanitize=cfi-icall" ]
+ }
+
+ if (use_cfi_diag) {
+ cflags += [ "-fno-sanitize-trap=cfi" ]
+ if (is_win) {
+ cflags += [
+ "/Oy-",
+ "/Ob0",
+ ]
+ } else {
+ cflags += [
+ "-fno-inline-functions",
+ "-fno-inline",
+ "-fno-omit-frame-pointer",
+ "-O1",
+ ]
+ }
+ if (use_cfi_recover) {
+ cflags += [ "-fsanitize-recover=cfi" ]
+ }
+ }
+ }
+}
+
+# crbug.com/785442: Fix cfi-icall failures for code that casts pointer argument
+# types in function pointer type signatures.
+config("cfi_icall_generalize_pointers") {
+ if (is_clang && is_cfi && use_cfi_icall) {
+ cflags = [ "-fsanitize-cfi-icall-generalize-pointers" ]
+ }
+}
+
+config("cfi_icall_disable") {
+ if (is_clang && is_cfi && use_cfi_icall) {
+ cflags = [ "-fno-sanitize=cfi-icall" ]
+ }
+}
+
+config("coverage_flags") {
+ cflags = []
+ if (use_sanitizer_coverage) {
+ # Used by sandboxing code to allow coverage dump to be written on the disk.
+ defines = [ "SANITIZER_COVERAGE" ]
+
+ if (use_libfuzzer && !is_mac) {
+ #cflags += [ "-fsanitize=fuzzer-no-link" ]
+ cflags += [
+ "-fsanitize-coverage=$sanitizer_coverage_flags",
+ "-mllvm",
+ "-sanitizer-coverage-prune-blocks=1",
+ ]
+ } else {
+ cflags += [
+ "-fsanitize-coverage=$sanitizer_coverage_flags",
+ "-mllvm",
+ "-sanitizer-coverage-prune-blocks=1",
+ ]
+ if (current_cpu == "arm") {
+ # http://crbug.com/517105
+ cflags += [
+ "-mllvm",
+ "-sanitizer-coverage-block-threshold=0",
+ ]
+ }
+ }
+ }
+}
+
+config("lsan_flags") {
+ if (is_lsan) {
+ cflags = [ "-fsanitize=leak" ]
+ }
+}
+
+config("msan_flags") {
+ if (is_msan) {
+ assert(is_linux, "msan only supported on linux x86_64")
+ if (!defined(msan_blocklist_path)) {
+ msan_blocklist_path =
+ rebase_path("//tools/msan/blocklist.txt", root_build_dir)
+ }
+ cflags = [
+ "-fsanitize=memory",
+ "-fsanitize-memory-track-origins=$msan_track_origins",
+ "-fsanitize-blacklist=$msan_blocklist_path",
+ ]
+ }
+}
+
+config("safestack_flags") {
+ if (is_safestack) {
+ cflags = [ "-fsanitize=safe-stack" ]
+ }
+}
+
+config("tsan_flags") {
+ if (is_tsan) {
+ assert(is_linux, "tsan only supported on linux x86_64")
+ if (!defined(tsan_blocklist_path)) {
+ tsan_blocklist_path =
+ rebase_path("//tools/memory/tsan_v2/ignores.txt", root_build_dir)
+ }
+ cflags = [
+ "-fsanitize=thread",
+ "-fsanitize-blacklist=$tsan_blocklist_path",
+ ]
+ }
+}
+
+config("ubsan_flags") {
+ cflags = []
+ if (is_ubsan) {
+ if (!defined(ubsan_blocklist_path)) {
+ ubsan_blocklist_path =
+ rebase_path("//tools/ubsan/blocklist.txt", root_build_dir)
+ }
+ cflags += [
+ # Yasm dies with an "Illegal instruction" error when bounds checking is
+ # enabled. See http://crbug.com/489901
+ # "-fsanitize=bounds",
+ "-fsanitize=float-divide-by-zero",
+ "-fsanitize=integer-divide-by-zero",
+ "-fsanitize=null",
+ "-fsanitize=object-size",
+ "-fsanitize=pointer-overflow",
+ "-fsanitize=return",
+ "-fsanitize=returns-nonnull-attribute",
+ "-fsanitize=shift-exponent",
+ "-fsanitize=signed-integer-overflow",
+ "-fsanitize=unreachable",
+ "-fsanitize=vla-bound",
+ "-fsanitize-blacklist=$ubsan_blocklist_path",
+ ]
+
+ # Chromecast ubsan builds fail to compile with these
+ # experimental flags, so only add them to non-chromecast ubsan builds.
+ if (!is_chromecast) {
+ cflags += [
+ # Employ the experimental PBQP register allocator to avoid slow
+ # compilation on files with too many basic blocks.
+ # See http://crbug.com/426271.
+ "-mllvm",
+ "-regalloc=pbqp",
+
+ # Speculatively use coalescing to slightly improve the code generated
+ # by PBQP regallocator. May increase compile time.
+ "-mllvm",
+ "-pbqp-coalescing",
+ ]
+ }
+ }
+}
+
+config("ubsan_no_recover") {
+ if (is_ubsan_no_recover) {
+ cflags = [ "-fno-sanitize-recover=undefined" ]
+ }
+}
+
+config("ubsan_security_flags") {
+ if (is_ubsan_security) {
+ if (!defined(ubsan_security_blocklist_path)) {
+ ubsan_security_blocklist_path =
+ rebase_path("//tools/ubsan/security_blocklist.txt", root_build_dir)
+ }
+ cflags = [
+ "-fsanitize=function",
+ "-fsanitize=pointer-overflow",
+ "-fsanitize=shift",
+ "-fsanitize=signed-integer-overflow",
+ "-fsanitize=vla-bound",
+ "-fsanitize=vptr",
+ "-fsanitize-blacklist=$ubsan_security_blocklist_path",
+ ]
+ }
+}
+
+config("ubsan_null_flags") {
+ if (is_ubsan_null) {
+ cflags = [ "-fsanitize=null" ]
+ }
+}
+
+config("ubsan_vptr_flags") {
+ if (is_ubsan_vptr) {
+ if (!defined(ubsan_vptr_blocklist_path)) {
+ ubsan_vptr_blocklist_path =
+ rebase_path("//tools/ubsan/vptr_blocklist.txt", root_build_dir)
+ }
+ cflags = [
+ "-fsanitize=vptr",
+ "-fsanitize-blacklist=$ubsan_vptr_blocklist_path",
+ ]
+ }
+}
+
+config("fuzzing_build_mode") {
+ if (use_fuzzing_engine && optimize_for_fuzzing) {
+ defines = [ "FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION" ]
+ }
+}
+
+all_sanitizer_configs = [
+ ":common_sanitizer_flags",
+ ":coverage_flags",
+ ":default_sanitizer_ldflags",
+ ":asan_flags",
+ ":cfi_flags",
+ ":lsan_flags",
+ ":msan_flags",
+ ":safestack_flags",
+ ":tsan_flags",
+ ":ubsan_flags",
+ ":ubsan_no_recover",
+ ":ubsan_null_flags",
+ ":ubsan_security_flags",
+ ":ubsan_vptr_flags",
+ ":fuzzing_build_mode",
+]
+
+# This config is applied by default to all targets. It sets the compiler flags
+# for sanitizer usage, or, if no sanitizer is set, does nothing.
+#
+# This needs to be in a separate config so that targets can opt out of
+# sanitizers (by removing the config) if they desire. Even if a target
+# removes this config, executables & shared libraries should still depend on
+# :deps if any of their dependencies have not opted out of sanitizers.
+# Keep this list in sync with default_sanitizer_flags_but_ubsan_vptr.
+config("default_sanitizer_flags") {
+ configs = all_sanitizer_configs
+}
+
+# This config is equivalent to default_sanitizer_flags, but excludes ubsan_vptr.
+# This allows ubsan_vptr to be disabled selectively when needed, in
+# particular for third_party code that must be compiled without RTTI, which
+# ubsan_vptr requires.
+config("default_sanitizer_flags_but_ubsan_vptr") {
+ configs = all_sanitizer_configs - [ ":ubsan_vptr_flags" ]
+}
+
+config("default_sanitizer_flags_but_coverage") {
+ configs = all_sanitizer_configs - [ ":coverage_flags" ]
+}
+
+# This config is used by parts of code that aren't targeted in fuzzers and
+# therefore don't need coverage instrumentation and possibly won't need
+# sanitizer instrumentation either. The config also tells the compiler to
+# perform additional optimizations on the configured code, and ensures that
+# it can be linked into the rest of the binary, which is instrumented with
+# sanitizers. The config only does anything if the build is a fuzzing build.
+config("not_fuzzed") {
+ if (use_fuzzing_engine) {
+ # Since we aren't instrumenting with coverage, code size is less of a
+ # concern, so use a more aggressive optimization level than
+ # optimize_for_fuzzing (-O1). When given multiple optimization flags, clang
+ # obeys the last one, so as long as this flag comes after -O1, it should work.
+ # Since this config will always be depended on after
+ # "//build/config/compiler:default_optimization" (which adds -O1 when
+ # optimize_for_fuzzing is true), -O2 should always be the second flag. Even
+ # though this sounds fragile, it isn't a big deal if it breaks, since proto
+ # fuzzers will still work; they will just be slightly slower.
+ cflags = [ "-O2" ]
+
+ # We need to include this config when we remove default_sanitizer_flags or
+ # else there will be linking errors. We would remove default_sanitizer_flags
+ # here as well, but gn doesn't permit this.
+ if (!is_msan) {
+ # We don't actually remove sanitization when MSan is being used so there
+ # is no need to add default_sanitizer_ldflags in that case
+ configs = [ ":default_sanitizer_ldflags" ]
+ }
+ }
+}
+
+config("sanitizer_trap_all_flags") {
+ cflags = [
+ "-fsanitize-trap=all",
+ "-ftrap-function=abort",
+ ]
+ ldflags = cflags
+}
+
+config("cfi_config") {
+ _cfi_blocklist_path = "//build/config/sanitizers/cfi_blocklist.txt"
+
+ configs = [
+ "//build/config/gcc:symbol_visibility_default",
+ ":sanitizer_trap_all_flags",
+ ]
+ cflags = [
+ "-flto",
+ "-fsanitize=cfi",
+ "-fsanitize-cfi-cross-dso",
+ "-fsanitize-blacklist=" + rebase_path(_cfi_blocklist_path, root_build_dir),
+ ]
+
+ ldflags = [
+ "-flto",
+ "-fsanitize-cfi-cross-dso",
+ "-fsanitize=cfi",
+ "-Wl,-plugin-opt,O1",
+ ]
+}
+
+config("shadow_call_stack_config") {
+ if (target_cpu == "arm64") {
+ cflags = [
+ # See https://clang.llvm.org/docs/ShadowCallStack.html
+ "-fsanitize=shadow-call-stack",
+ ]
+ ldflags = cflags
+ configs = [ ":sanitizer_trap_all_flags" ]
+ }
+}
+
+template("config_plus_compiler_rt") {
+ forward_variables_from(invoker, [ "minimal_rt_lib_names" ])
+ not_needed([ "minimal_rt_lib_names" ])
+
+ _clang_rt_dso_paths = []
+ if ((host_os == "linux" || host_os == "mac") && target_os == "ohos" &&
+ !is_mingw) {
+ _clang_rt_libs_dir = "$clang_base_path/lib64/clang/10.0.1/lib/"
+
+ _dso_names = []
+ foreach(dso_name, minimal_rt_lib_names) {
+ # Add runtime library support
+ if (target_cpu == "arm64") {
+ _dso_names += [ "aarch64-linux-ohosmusl/libclang_rt.${dso_name}.a" ]
+ } else if (target_cpu == "arm") {
+ _dso_names += [ "arm-linux-ohosmusl/libclang_rt.${dso_name}.a" ]
+ } else if (target_cpu == "x86_64") {
+ _dso_names += []
+ }
+ }
+
+ foreach(rt_lib, _dso_names) {
+ _clang_rt_dso_paths += [ "$_clang_rt_libs_dir/${rt_lib}" ]
+ }
+ }
+
+ config(target_name) {
+ forward_variables_from(invoker,
+ [
+ "cflags",
+ "cflags_cc",
+ "asmflags",
+ "ldflags",
+ "libs",
+ "configs",
+ ])
+ if (defined(libs)) {
+ libs += _clang_rt_dso_paths
+ } else {
+ libs = _clang_rt_dso_paths
+ }
+
+ foreach(dso, _clang_rt_dso_paths) {
+ if (defined(ldflags)) {
+ ldflags += [ "-Wl,--exclude-libs,${dso}" ]
+ } else {
+ ldflags = [ "-Wl,--exclude-libs,${dso}" ]
+ }
+ }
+ }
+}
+
+config_plus_compiler_rt("scudo_config") {
+ cflags = [ "-fsanitize=scudo" ]
+ configs = [ ":sanitizer_trap_all_flags" ]
+ minimal_rt_lib_names = [ "scudo_minimal" ]
+}
+
+config_plus_compiler_rt("undefined_behavior_sanitize_config") {
+ cflags = [
+ "-fsanitize=bool,integer-divide-by-zero,return,returns-nonnull-attribute,shift-exponent,unreachable,vla-bound",
+ "-fsanitize-minimal-runtime",
+ "-fno-sanitize-trap=integer,undefined",
+ "-fno-sanitize-recover=integer,undefined",
+ "-fno-sanitize=implicit-integer-sign-change",
+ ]
+ configs = [ ":sanitizer_trap_all_flags" ]
+ minimal_rt_lib_names = [
+ "ubsan_standalone",
+ "ubsan_minimal",
+ ]
+}
+
+config_plus_compiler_rt("boundary_sanitize_config") {
+ cflags = [
+ "-fsanitize=bounds",
+ "-fsanitize-minimal-runtime",
+ "-fno-sanitize-trap=integer,undefined",
+ "-fno-sanitize-recover=integer,undefined",
+ "-fno-sanitize=implicit-integer-sign-change",
+ ]
+ configs = [ ":sanitizer_trap_all_flags" ]
+ minimal_rt_lib_names = [ "ubsan_minimal" ]
+}
+
+config_plus_compiler_rt("integer_overflow_config") {
+ _integer_overflow_blocklist = "./integer_overflow_blocklist.txt"
+ cflags = [
+ "-fsanitize-blacklist=" +
+ rebase_path(_integer_overflow_blocklist, root_build_dir),
+ "-fsanitize=unsigned-integer-overflow,signed-integer-overflow",
+ "-fsanitize-minimal-runtime",
+ "-fno-sanitize-trap=integer,undefined",
+ "-fno-sanitize-recover=integer,undefined",
+ ]
+ configs = [ ":sanitizer_trap_all_flags" ]
+ minimal_rt_lib_names = [ "ubsan_minimal" ]
+}
diff --git a/dsoftbus/build/config/sanitizers/cfi_blocklist.txt b/dsoftbus/build/config/sanitizers/cfi_blocklist.txt
new file mode 100644
index 0000000000000000000000000000000000000000..644b07866434df0594fca7e5577cfea196121149
--- /dev/null
+++ b/dsoftbus/build/config/sanitizers/cfi_blocklist.txt
@@ -0,0 +1,29 @@
+# Standard library types.
+type:std::*
+
+# The stdext namespace contains Microsoft standard library extensions.
+type:stdext::*
+
+# Types with a uuid attribute, i.e. COM types.
+type:attr:uuid
+
+# STL allocators (T *allocator::allocate(size_type, const void*)).
+# The type signature mandates a cast from uninitialized void* to T*.
+# size_type can either be unsigned int (j) or unsigned long (m).
+fun:*8allocateEjPKv
+fun:*8allocateEmPKv
+
+# std::get_temporary_buffer, likewise (libstdc++, libc++).
+fun:_ZSt20get_temporary_buffer*
+fun:_ZNSt3__120get_temporary_buffer*
+
+# STL address-of magic (libstdc++, libc++).
+fun:*__addressof*
+fun:_ZNSt3__19addressof*
+
+# Windows C++ stdlib headers that contain bad unrelated casts.
+src:*xmemory0
+src:*xstddef
+
+# b/119127110
+fun:*internal_default_instance*
diff --git a/dsoftbus/build/config/sanitizers/integer_overflow_blocklist.txt b/dsoftbus/build/config/sanitizers/integer_overflow_blocklist.txt
new file mode 100644
index 0000000000000000000000000000000000000000..3480c3c08dba15fe20cbd2dfe4d28674a5886088
--- /dev/null
+++ b/dsoftbus/build/config/sanitizers/integer_overflow_blocklist.txt
@@ -0,0 +1,4 @@
+fun:*([Hh]ash|HASH)*
+fun:*([Cc]rypto|CRYPTO)*
+fun:*([Ss]ha|SHA)(1|256|512)*
+fun:*([Cc]ompress|COMPRESS)*
diff --git a/dsoftbus/build/config/sanitizers/sanitizers.gni b/dsoftbus/build/config/sanitizers/sanitizers.gni
new file mode 100755
index 0000000000000000000000000000000000000000..757999e1bf10a373cbb44d0930fcfa94119671fc
--- /dev/null
+++ b/dsoftbus/build/config/sanitizers/sanitizers.gni
@@ -0,0 +1,294 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/coverage/coverage.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+ # Compile for Address Sanitizer to find memory bugs.
+ is_asan = false
+
+ # Compile for Leak Sanitizer to find leaks.
+ is_lsan = false
+
+ # Compile for Memory Sanitizer to find uninitialized reads.
+ is_msan = false
+
+ # Compile for Thread Sanitizer to find threading bugs.
+ is_tsan = false
+
+ # Compile for Undefined Behavior Sanitizer to find various types of
+ # undefined behavior (excludes vptr checks).
+ is_ubsan = false
+
+ # Halt the program if a problem is detected.
+ is_ubsan_no_recover = false
+
+ # Compile for Undefined Behavior Sanitizer's null pointer checks.
+ is_ubsan_null = false
+
+ # Compile for Undefined Behavior Sanitizer's vptr checks.
+ is_ubsan_vptr = false
+
+ # Compile with SafeStack shadow stack support.
+ is_safestack = false
+
+ # Track where uninitialized memory originates from. From fastest to slowest:
+ # 0 - no tracking, 1 - track only the initial allocation site, 2 - track the
+ # chain of stores leading from allocation site to use site.
+ msan_track_origins = 2
+
+ # Use dynamic libraries instrumented by one of the sanitizers instead of the
+ # standard system libraries. Set this flag to download prebuilt binaries from
+ # GCS.
+ use_prebuilt_instrumented_libraries = false
+
+ # Use dynamic libraries instrumented by one of the sanitizers instead of the
+ # standard system libraries. Set this flag to build the libraries from source.
+ use_locally_built_instrumented_libraries = false
+
+ # Compile with Control Flow Integrity to protect virtual calls and casts.
+ # See http://clang.llvm.org/docs/ControlFlowIntegrity.html
+ is_cfi = target_os == "linux" && !is_chromeos && target_cpu == "x64" &&
+ is_official_build
+
+ # Enable checks for bad casts: derived cast and unrelated cast.
+ use_cfi_cast = false
+
+ # Enable checks for indirect function calls via a function pointer.
+ use_cfi_icall = target_os == "linux" && !is_chromeos && target_cpu == "x64" &&
+ is_official_build
+
+ # Print detailed diagnostics when Control Flow Integrity detects a violation.
+ use_cfi_diag = false
+
+ # Let Control Flow Integrity continue execution instead of crashing when
+ # printing diagnostics (use_cfi_diag = true).
+ use_cfi_recover = false
+
+ # Compile for fuzzing with LLVM LibFuzzer.
+ # See http://www.chromium.org/developers/testing/libfuzzer
+ use_libfuzzer = false
+
+ # Compile for fuzzing with AFL.
+ use_afl = false
+
+ # Enables core ubsan security features. Will later be removed once it matches
+ # is_ubsan.
+ is_ubsan_security = false
+
+ # Compile for fuzzing with Dr. Fuzz
+ # See http://www.chromium.org/developers/testing/dr-fuzz
+ use_drfuzz = false
+
+ # Helper variable for testing builds with disabled libfuzzer.
+ # Not for client use.
+ disable_libfuzzer = false
+
+ # Optimize for coverage guided fuzzing (balance between speed and number of
+ # branches). Can also be used to remove non-determinism and other issues.
+ optimize_for_fuzzing = false
+
+ # Value for -fsanitize-coverage flag. Setting this causes
+ # use_sanitizer_coverage to be enabled.
+ # This flag is not used for libFuzzer (use_libfuzzer=true) unless we are on
+ # Mac. Instead, we use:
+ # -fsanitize=fuzzer-no-link
+ # Default value when unset and use_fuzzing_engine=true:
+ # trace-pc-guard
+ # Default value when unset and use_sanitizer_coverage=true:
+ # trace-pc-guard,indirect-calls
+ sanitizer_coverage_flags = ""
+}
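+
+# Example `gn args` sketch for a release ASan+LSan build (is_lsan requires
+# is_asan, and sanitizers are normally used with is_debug=false; see the
+# asserts below):
+#   is_debug = false
+#   is_asan = true
+#   is_lsan = true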
+
+is_v8_host_toolchain =
+ current_toolchain == "//build/toolchain/linux:clang_x64_v8_arm64" ||
+ current_toolchain == "//build/toolchain/linux:clang_x86_v8_arm"
+
+# Disable sanitizers for non-default toolchains.
+if (current_toolchain == host_toolchain || is_v8_host_toolchain) {
+ is_asan = false
+ is_cfi = false
+ is_lsan = false
+ is_msan = false
+ is_tsan = false
+ is_ubsan = false
+ is_ubsan_null = false
+ is_ubsan_no_recover = false
+ is_ubsan_security = false
+ is_ubsan_vptr = false
+ msan_track_origins = 0
+ sanitizer_coverage_flags = ""
+ use_afl = false
+ use_cfi_diag = false
+ use_cfi_recover = false
+ use_drfuzz = false
+ use_libfuzzer = false
+ use_prebuilt_instrumented_libraries = false
+ use_locally_built_instrumented_libraries = false
+ use_sanitizer_coverage = false
+}
+
+# Whether we are doing a fuzzer build. Normally this should be checked instead
+# of checking "use_libfuzzer || use_afl" because often developers forget to
+# check for "use_afl".
+use_fuzzing_engine = use_libfuzzer || use_afl
+
+# Args that are in turn dependent on other args must be in a separate
+# declare_args block. User overrides are only applied at the end of a
+# declare_args block.
+declare_args() {
+ use_sanitizer_coverage =
+ !use_clang_coverage &&
+ (use_fuzzing_engine || sanitizer_coverage_flags != "")
+
+ # Detect overflow/underflow for global objects.
+ #
+ # Mac: http://crbug.com/352073
+ asan_globals = !is_mac
+}
+
+if (use_fuzzing_engine && sanitizer_coverage_flags == "") {
+ sanitizer_coverage_flags = "trace-pc-guard"
+} else if (use_sanitizer_coverage && sanitizer_coverage_flags == "") {
+ sanitizer_coverage_flags = "trace-pc-guard,indirect-calls"
+}
+
+# Whether we are linking against a debugging sanitizer runtime library. Among
+# other things, this changes the default symbol level and other settings in
+# order to prepare to create stack traces "live" using the sanitizer runtime.
+using_sanitizer =
+ is_asan || is_lsan || is_tsan || is_msan || is_ubsan || is_ubsan_null ||
+ is_ubsan_vptr || is_ubsan_security || use_sanitizer_coverage || use_cfi_diag
+
+assert(!using_sanitizer || is_clang,
+ "Sanitizers (is_*san) require setting is_clang = true in 'gn args'")
+
+assert(!is_cfi || is_clang,
+ "is_cfi requires setting is_clang = true in 'gn args'")
+
+assert(!is_safestack || is_clang,
+ "is_safestack requires setting is_clang = true in 'gn args'")
+
+prebuilt_instrumented_libraries_available =
+ is_msan && (msan_track_origins == 0 || msan_track_origins == 2)
+
+if (use_libfuzzer && is_linux) {
+ if (is_asan) {
+ # We do leak checking with libFuzzer on Linux. Set is_lsan for code that
+ # relies on LEAK_SANITIZER define to avoid false positives.
+ is_lsan = true
+ }
+ if (is_msan) {
+ use_prebuilt_instrumented_libraries = true
+ }
+}
+
+# MSan only links Chrome properly in release builds (brettw -- 9/1/2015). The
+# same is possibly true for the other non-ASan sanitizers. But regardless of
+# whether it links, one would normally never run a sanitizer in debug mode.
+# Running in debug mode probably indicates you forgot to set the "is_debug =
+# false" flag in the build args. ASan seems to run fine in debug mode.
+#
+# If you find a use-case where you want to compile a sanitizer in debug mode
+# and have verified it works, ask brettw and we can consider removing it from
+# this condition. We may also be able to find another way to enable your case
+# without having people accidentally get broken builds by compiling an
+# unsupported or unadvisable configuration.
+#
+# For one-off testing, just comment this assertion out.
+assert(!is_debug || !(is_msan || is_ubsan || is_ubsan_null || is_ubsan_vptr),
+ "Sanitizers should generally be used in release (set is_debug=false).")
+
+assert(!is_msan || (is_linux && current_cpu == "x64"),
+ "MSan currently only works on 64-bit Linux and ChromeOS builds.")
+
+assert(!is_lsan || is_asan, "is_lsan = true requires is_asan = true also.")
+
+# ASAN build on Windows is not working in debug mode. Intercepting memory
+# allocation functions is hard on Windows and not yet implemented in LLVM.
+assert(!is_win || !is_debug || !is_asan,
+ "ASan on Windows doesn't work in debug (set is_debug=false).")
+
+# Make sure that if we recover on detection (i.e. not crash), diagnostics are
+# printed.
+assert(!use_cfi_recover || use_cfi_diag,
+ "Only use CFI recovery together with diagnostics.")
+
+assert(
+ !(use_sanitizer_coverage && is_mac && target_os == "ios"),
+ "crbug.com/753445: use_sanitizer_coverage=true is not supported by the " +
+ "Chromium mac_clang_x64 toolchain on iOS distribution. Please set " +
+ "the argument value to false.")
+
+# Use these lists of configs to disable instrumenting code that is part of a
+# fuzzer, but which isn't being targeted (such as libprotobuf-mutator, *.pb.cc
+# and libprotobuf when they are built as part of a proto fuzzer). Adding or
+# removing these lists does not have any effect if use_libfuzzer or use_afl are
+# not passed as arguments to gn.
+not_fuzzed_remove_configs = []
+not_fuzzed_remove_nonasan_configs = []
+
+if (use_fuzzing_engine) {
+ # Removing coverage should always just work.
+ not_fuzzed_remove_configs += [ "//build/config/coverage:default_coverage" ]
+ not_fuzzed_remove_nonasan_configs +=
+ [ "//build/config/coverage:default_coverage" ]
+
+ if (!is_msan) {
+ # Allow sanitizer instrumentation to be removed if we are not using MSan
+ # since binaries cannot be partially instrumented with MSan.
+ not_fuzzed_remove_configs +=
+ [ "//build/config/sanitizers:default_sanitizer_flags" ]
+
+ # Certain parts of binaries must be instrumented with ASan if the rest of
+ # the binary is. For these, only remove non-ASan sanitizer instrumentation.
+ if (!is_asan) {
+ not_fuzzed_remove_nonasan_configs +=
+ [ "//build/config/sanitizers:default_sanitizer_flags" ]
+ assert(not_fuzzed_remove_nonasan_configs == not_fuzzed_remove_configs)
+ }
+ }
+}
+
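+# Declares a config that aggregates the sanitizer configs selected by the
+# invoker. Usage sketch (hypothetical target name):
+#
+#   ohos_sanitizer_config("my_sanitizers") {
+#     ubsan = true
+#     integer_overflow_sanitize = true
+#   }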
+template("ohos_sanitizer_config") {
+ config(target_name) {
+ forward_variables_from(invoker,
+ [
+ "cfi",
+ "scs",
+ "scudo",
+ "ubsan",
+ "boundary_sanitize",
+ "integer_overflow_sanitize",
+ ])
+ configs = []
+ _cfi = defined(cfi) && cfi
+ if (_cfi) {
+ configs += [ "//build/config/sanitizers:cfi_config" ]
+ }
+ _scudo = defined(scudo) && scudo
+ if (_scudo) {
+ configs += [ "//build/config/sanitizers:scudo_config" ]
+ }
+ _ubsan = defined(ubsan) && ubsan
+ if (_ubsan) {
+ configs +=
+ [ "//build/config/sanitizers:undefined_behavior_sanitize_config" ]
+ }
+ _integer_sanitize =
+ defined(integer_overflow_sanitize) && integer_overflow_sanitize
+ if (_integer_sanitize) {
+ configs += [ "//build/config/sanitizers:integer_overflow_config" ]
+ }
+ _scs = defined(scs) && scs
+ if (_scs) {
+ configs += [ "//build/config/sanitizers:shadow_call_stack_config" ]
+ }
+ _boundary_sanitize = defined(boundary_sanitize) && boundary_sanitize
+ if (_boundary_sanitize) {
+ configs += [ "//build/config/sanitizers:boundary_sanitize_config" ]
+ }
+ }
+}
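+
+# A minimal usage sketch (the target name and flags below are hypothetical,
+# kept as a comment so the file's behavior is unchanged):
+#
+#   ohos_sanitizer_config("example_sanitizer_config") {
+#     ubsan = true
+#     integer_overflow_sanitize = true
+#   }
+#
+# A target would then opt in with:
+#   configs += [ ":example_sanitizer_config" ]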
diff --git a/dsoftbus/build/config/sysroot.gni b/dsoftbus/build/config/sysroot.gni
new file mode 100755
index 0000000000000000000000000000000000000000..3bc0df45b85c4f175373488c56758c22f5db4ef2
--- /dev/null
+++ b/dsoftbus/build/config/sysroot.gni
@@ -0,0 +1,28 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This header file defines the "sysroot" variable which is the absolute path
+# of the sysroot. If no sysroot applies, the variable will be an empty string.
+
+declare_args() {
+ # The absolute path of the sysroot that is applied when compiling using
+ # the target toolchain.
+ target_sysroot = ""
+ use_sysroot = current_cpu == "arm" || current_cpu == "arm64"
+}
+
+if (current_os == target_os && current_cpu == target_cpu &&
+ target_sysroot != "") {
+ sysroot = target_sysroot
+} else if (is_ohos) {
+ import("//build/config/ohos/config.gni")
+ sysroot = "${musl_sysroot}"
+} else if (is_mac) {
+ import("//build/config/mac/mac_sdk.gni")
+ sysroot = mac_sdk_path
+} else {
+ sysroot = ""
+}
+
+link_sysroot = sysroot
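+
+# Illustrative override (hypothetical path): when current_os/current_cpu match
+# target_os/target_cpu, a custom sysroot can be supplied on the gn command
+# line, e.g.
+#   gn gen out/custom --args='target_sysroot="/opt/my_sysroot"'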
diff --git a/dsoftbus/build/config/v8_target_cpu.gni b/dsoftbus/build/config/v8_target_cpu.gni
new file mode 100755
index 0000000000000000000000000000000000000000..f738b5f4fa0888cb2ef5c5dbe370380b72e148da
--- /dev/null
+++ b/dsoftbus/build/config/v8_target_cpu.gni
@@ -0,0 +1,58 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+ # This arg is used when we want to tell the JIT-generating v8 code
+ # that we want to have it generate for an architecture that is different
+ # than the architecture that v8 will actually run on; we then run the
+ # code under an emulator. For example, we might run v8 on x86, but
+ # generate arm code and run that under emulation.
+ #
+ # This arg is defined here rather than in the v8 project because we want
+ # some of the common architecture-specific args (like arm_float_abi or
+ # mips_arch_variant) to be set to their defaults either if the current_cpu
+ # applies *or* if the v8_current_cpu applies.
+ #
+ # As described below, you can also specify the v8_target_cpu to use
+ # indirectly by specifying a `custom_toolchain` that contains v8_$cpu in the
+ # name after the normal toolchain.
+ #
+ # For example, `gn gen --args="custom_toolchain=...:clang_x64_v8_arm64"`
+ # is equivalent to setting --args=`v8_target_cpu="arm64"`. Setting
+ # `custom_toolchain` is more verbose but makes the toolchain that is
+ # (effectively) being used explicit.
+ #
+ # v8_target_cpu can only be used to target one architecture in a build,
+ # so if you wish to build multiple copies of v8 that are targeting
+ # different architectures, you will need to do something more
+ # complicated involving multiple toolchains along the lines of
+ # custom_toolchain, above.
+ v8_target_cpu = ""
+}
+
+if (v8_target_cpu == "") {
+ if (current_toolchain == "//build/toolchain/linux:clang_x64_v8_arm64") {
+ v8_target_cpu = "arm64"
+ } else if (current_toolchain == "//build/toolchain/linux:clang_x86_v8_arm") {
+ v8_target_cpu = "arm"
+ } else if (current_toolchain ==
+ "//build/toolchain/linux:clang_x64_v8_x86_64") {
+ v8_target_cpu = "x86_64"
+ } else if (is_msan) {
+ # If we're running under a sanitizer, if we configure v8 to generate
+ # code that will be run under a simulator, then the generated code
+ # also gets the benefits of the sanitizer.
+ v8_target_cpu = "arm64"
+ } else {
+ v8_target_cpu = target_cpu
+ }
+}
+
+declare_args() {
+ # This argument is declared here so that it can be overridden in toolchains.
+ # It should never be explicitly set by the user.
+ v8_current_cpu = v8_target_cpu
+}
diff --git a/dsoftbus/build/config/zip.gni b/dsoftbus/build/config/zip.gni
new file mode 100755
index 0000000000000000000000000000000000000000..9bf829c6606a10e799064d0c4f54e15ac2bdfb2f
--- /dev/null
+++ b/dsoftbus/build/config/zip.gni
@@ -0,0 +1,53 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Creates a zip archive of the inputs.
+#
+# inputs (required)
+# List of input files relative to the current directory.
+#
+# output (required)
+# File name to write.
+#
+# base_dir (optional)
+# If provided, the archive paths will be relative to this directory.
+#
+# deps, public_deps, data_deps, testonly, visibility (optional)
+# Normal meaning.
+template("zip") {
+ action(target_name) {
+ script = "//build/zip.py"
+ depfile = "$target_gen_dir/$target_name.d"
+    assert(defined(invoker.inputs), "inputs is required for zip()")
+    assert(defined(invoker.output), "output is required for zip()")
+
+    inputs = invoker.inputs
+    outputs = [ invoker.output ]
+
+    rebase_inputs = rebase_path(invoker.inputs, root_build_dir)
+    rebase_output = rebase_path(invoker.output, root_build_dir)
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--inputs=$rebase_inputs",
+ "--output=$rebase_output",
+ ]
+ if (defined(invoker.base_dir)) {
+ args += [
+ "--base-dir",
+ rebase_path(invoker.base_dir, root_build_dir),
+ ]
+ }
+
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "deps",
+ "public_deps",
+ "data_deps",
+ "visibility",
+ ])
+ }
+}
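+
+# A minimal usage sketch (file names are illustrative only):
+#
+#   zip("example_zip") {
+#     inputs = [ "file1.txt", "file2.txt" ]
+#     output = "$root_build_dir/example.zip"
+#     base_dir = "."
+#   }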
diff --git a/dsoftbus/build/core/build_scripts/common_fun.sh b/dsoftbus/build/core/build_scripts/common_fun.sh
new file mode 100755
index 0000000000000000000000000000000000000000..36373de25f61ca19b0b9cea9aec83c9961ffd1d1
--- /dev/null
+++ b/dsoftbus/build/core/build_scripts/common_fun.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+rename_last_log()
+{
+ local log=$1
+ if [ -e "${log}" ]; then
+ if [ "${HOST_OS}x" == "macx" ]; then
+ epoch=$(stat -f %m $log)
+ else
+ epoch=$(stat --format=%Y $log)
+ fi
+ mv $log ${TARGET_OUT_DIR}/build.$epoch.log
+ fi
+}
+
+log_prepare()
+{
+ mkdir -p $TARGET_OUT_DIR
+ log=$1
+ rename_last_log $log
+ touch $log
+}
+
+log()
+{
+ if [ "$#" -lt 1 ]; then
+ return
+ fi
+ echo "$@" | tee -a $LOG_FILE
+}
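+
+# Illustrative call sequence (mirrors how make_main.sh uses these helpers):
+#   TARGET_OUT_DIR=out/ohos-arm64-release
+#   LOG_FILE=${TARGET_OUT_DIR}/build.log
+#   log_prepare $LOG_FILE
+#   log "build started"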
diff --git a/dsoftbus/build/core/build_scripts/get_gn_parameters.sh b/dsoftbus/build/core/build_scripts/get_gn_parameters.sh
new file mode 100755
index 0000000000000000000000000000000000000000..040b80cb6efbc8ad609beac60e792191245cc098
--- /dev/null
+++ b/dsoftbus/build/core/build_scripts/get_gn_parameters.sh
@@ -0,0 +1,85 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+get_gn_parameters()
+{
+ if [ "${TARGET_VERSION_MODE}" == "sanitizer" ]; then
+ IS_ASAN="is_asan=true"
+ fi
+
+ if [ "${COVERAGE}" == true ];then
+ if [ "${TARGET_OS}" == "ohos" ];then
+ GN_ARGS="$GN_ARGS use_clang_coverage=true"
+ fi
+ fi
+
+ if [ "${CUSTOM_CLANG}" == true ];then
+ GN_ARGS="$GN_ARGS use_custom_clang=true"
+ fi
+
+ if [ "${DOUBLE_FRAMEWORK}" == true ];then
+ GN_ARGS="$GN_ARGS is_double_framework=true"
+ fi
+
+ if [ "${EBPF_ENABLE}" == true ];then
+ GN_ARGS="$GN_ARGS ebpf_enable=true"
+ fi
+
+ if [ "${BUILD_XTS}" == true ];then
+ GN_ARGS="$GN_ARGS build_xts=true"
+ log "Build xts enabled"
+ fi
+
+ if [ "${BUILD_OHOS_SDK}" == true ];then
+ GN_ARGS="$GN_ARGS build_ohos_sdk=true"
+ fi
+
+ if [ "${INTERFACE_CHECK}" == false ];then
+ GN_ARGS="$GN_ARGS check_innersdk_interface=false check_sdk_interface=false"
+ fi
+
+ if [ "${TARGET_PLATFORM}" != "" ];then
+ GN_ARGS="build_platform=\"${TARGET_PLATFORM}\" $GN_ARGS"
+ fi
+
+ if [ "${SDK_VERSION}"x != x ];then
+ GN_ARGS="$GN_ARGS sdk_version=\"${SDK_VERSION}\""
+ fi
+
+ if [ "${HOSP_VERSION}"x != x ];then
+ GN_ARGS="$GN_ARGS hosp_version=\"${HOSP_VERSION}\""
+ fi
+
+ if [ "${RELEASE_TYPE}"x != x ];then
+ GN_ARGS="$GN_ARGS release_type=\"${RELEASE_TYPE}\""
+ fi
+
+ if [ "${META_VERSION}"x != x ];then
+ GN_ARGS="$GN_ARGS meta_version=\"${META_VERSION}\""
+ fi
+
+ if [ "${API_VERSION}"x != x ];then
+ GN_ARGS="$GN_ARGS api_version=\"${API_VERSION}\""
+ fi
+
+ if [ "${BUILD_EXAMPLE}" == true ];then
+ GN_ARGS="$GN_ARGS build_example=true"
+ fi
+
+ if [ "${PYCACHE_ENABLE}" == true ];then
+ GN_ARGS="$GN_ARGS pycache_enable=true"
+ fi
+}
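+
+# Illustrative result (example values): with COVERAGE=true, TARGET_OS=ohos and
+# SDK_VERSION=3.0 set by the caller, GN_ARGS accumulates to something like:
+#   use_clang_coverage=true sdk_version="3.0"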
diff --git a/dsoftbus/build/core/build_scripts/init_parameters.sh b/dsoftbus/build/core/build_scripts/init_parameters.sh
new file mode 100755
index 0000000000000000000000000000000000000000..06f73924f40e69a08c79abe4e6d9abac35727026
--- /dev/null
+++ b/dsoftbus/build/core/build_scripts/init_parameters.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+init_parameter()
+{
+ BUILD_TOOLS_DIR=${BASE_HOME}/prebuilts/build-tools/${HOST_DIR}/bin
+ TEST_BUILD_PARA_STRING=""
+ DEVICE_TYPE=""
+ TARGET_OS=ohos
+ BUILD_VARIANT=release
+ TARGET_ARCH=arm64
+ OUT_DIR=out
+ NINJA_ARGS=""
+ TARGET_VERSION_MODE=""
+ DOUBLE_FRAMEWORK=""
+ BUILD_ONLY_GN=false
+ SKIP_GN_PARSE=false
+ TARGET_PLATFORM=""
+ EBPF_ENABLE=false
+ MAPLE_JOBS=0
+ BUILD_XTS=false
+ RELEASE_TEST_SUITE=false
+ BUILD_MAPLE_TARGETS=false
+ BUILD_OHOS_SDK=false
+ BUILD_VERSION="0"
+ INTERFACE_CHECK=true
+ LITE_PARAM="--chip hi3518ev300"
+ BUILD_EXAMPLE=false
+    PYCACHE_ENABLE=false
+ USE_NARUTO=false
+ OPEN_SOURCE=false
+ REPO_CBUILD=false
+}
diff --git a/dsoftbus/build/core/build_scripts/make_main.sh b/dsoftbus/build/core/build_scripts/make_main.sh
new file mode 100755
index 0000000000000000000000000000000000000000..770145cb7019fc4038221e675985ba536b0280de
--- /dev/null
+++ b/dsoftbus/build/core/build_scripts/make_main.sh
@@ -0,0 +1,95 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+do_make()
+{
+ TARGET_OUT_DIR=${BASE_HOME}/${OUT_DIR}/${TARGET_OS}-${TARGET_ARCH}-${BUILD_VARIANT}
+ if [[ ! -d "${TARGET_OUT_DIR}" ]];then
+ mkdir -p ${TARGET_OUT_DIR}
+ fi
+ if [[ "${TARGET_OS}" == "ohos" && "${TARGET_ARCH}" == "arm64" ]];then
+ TARGET_OUT_DIR_LINK=${BASE_HOME}/${OUT_DIR}/${BUILD_VARIANT}
+ if [[ -d "${TARGET_OUT_DIR_LINK}" ]] || [[ -L "${TARGET_OUT_DIR_LINK}" ]];then
+ rm -r ${TARGET_OUT_DIR_LINK}
+ fi
+ ln -s ${TARGET_OS}-${TARGET_ARCH}-${BUILD_VARIANT} ${TARGET_OUT_DIR_LINK}
+ fi
+
+ # prepare to save build log
+ LOG_FILE=${TARGET_OUT_DIR}/build.log
+ log_prepare $LOG_FILE
+ log "$@"
+
+ BEGIN_TIME=$(date "+%s")
+
+ source ${BUILD_SCRIPT_DIR}/get_gn_parameters.sh
+ get_gn_parameters
+
+ if [ "${SKIP_GN_PARSE}"x = falsex ]; then
+ ${BUILD_TOOLS_DIR}/gn gen ${TARGET_OUT_DIR} \
+ --args="target_os=\"${TARGET_OS}\" target_cpu=\"${TARGET_ARCH}\" is_debug=false \
+ product_name=\"${PRODUCT_NAME}\" \
+ is_component_build=true \
+ ${GN_ARGS} ${TEST_BUILD_PARA_STRING} ${IS_ASAN} \
+ release_test_suite=${RELEASE_TEST_SUITE}" 2>&1 | tee -a $log
+
+ if [ "${PIPESTATUS[0]}" != 0 ]; then
+ log "build: gn gen error"
+ RET=1
+ return
+ fi
+
+ if [[ "${BUILD_ONLY_GN}" = true ]];then
+ RET=0
+ return
+ fi
+ fi
+
+ if [[ "${REPO_CBUILD}" == true ]];then
+ collect_module_info_args="--root-build-dir ${TARGET_OUT_DIR} \
+ --output-file ${TARGET_OUT_DIR}/cbuild/targets_info.json"
+ python ${BASE_HOME}/build/misc/cbuild/collect_module_info.py $collect_module_info_args
+ fi
+
+ if [ "${BUILD_TARGET_NAME}" == "all" ]; then
+ BUILD_TARGET_NAME="make_all make_test"
+ elif [ "${BUILD_TARGET_NAME}" == "" ]; then
+ BUILD_TARGET_NAME=packages
+ fi
+
+ log "Starting Ninja..."
+ NINJA_START_TIME=$(date +%s%N)
+    echo "python version: $(python --version)"
+ ninja_build_args="--source-root-dir ${BASE_HOME} --root-build-dir ${TARGET_OUT_DIR} \
+ --build-target-name ${BUILD_TARGET_NAME}"
+ if [ "${TARGET_PLATFORM}" != "" ];then
+ ninja_build_args="$ninja_build_args --target-platform ${TARGET_PLATFORM}"
+ fi
+ real_build_target=$(python ${BASE_HOME}/build/scripts/build_target_handler.py $ninja_build_args)
+ echo "build_target: "$real_build_target
+
+ if [ "${USE_NARUTO}"x = "truex" ];then
+ ${BUILD_TOOLS_DIR}/naruto -d keepdepfile -p ${BASE_HOME}/.naruto_cache -C ${TARGET_OUT_DIR} ${real_build_target} ${NINJA_ARGS} 2>&1 | tee -a $log
+ else
+ ${BUILD_TOOLS_DIR}/ninja -d keepdepfile -C ${TARGET_OUT_DIR} ${real_build_target} ${NINJA_ARGS} 2>&1 | tee -a $log
+ fi
+
+ if [ "${PIPESTATUS[0]}" != 0 ]; then
+ log "build: ninja error"
+ RET=1
+ return
+ fi
+}
diff --git a/dsoftbus/build/core/build_scripts/parse_cmdline.sh b/dsoftbus/build/core/build_scripts/parse_cmdline.sh
new file mode 100755
index 0000000000000000000000000000000000000000..d8e351ede111cb9a29070a28367f8be8c4d3f850
--- /dev/null
+++ b/dsoftbus/build/core/build_scripts/parse_cmdline.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+parse_cmdline()
+{
+ while [ -n "$1" ]
+ do
+ var="$1"
+        OPTIONS="${var%%=*}"
+        PARAM="${var#*=}"
+ echo "OPTIONS=$OPTIONS"
+ echo "PARAM=$PARAM"
+ echo "-------------------"
+ case "$OPTIONS" in
+ test_build_para) TEST_BUILD_PARA_STRING="$PARAM" ;;
+ product_name) PRODUCT_NAME="$PARAM" ;;
+ build_target) BUILD_TARGET_NAME="$BUILD_TARGET_NAME $PARAM" ;;
+ target_os) TARGET_OS="$PARAM" ;;
+ target_cpu) TARGET_ARCH="$PARAM" ;;
+ variant) BUILD_VARIANT="$PARAM" ;;
+ out_dir) OUT_DIR="$PARAM" ;;
+ gn_args) GN_ARGS="$GN_ARGS $PARAM" ;;
+ ninja_args) NINJA_ARGS="$PARAM" ;;
+ versionmode) TARGET_VERSION_MODE="$PARAM" ;;
+ coverage) COVERAGE="$PARAM" ;;
+ custom_clang) CUSTOM_CLANG="$PARAM" ;;
+ double_framework) DOUBLE_FRAMEWORK="$PARAM" ;;
+ build_only_gn) BUILD_ONLY_GN="$PARAM" ;;
+ skip_gn_parse) SKIP_GN_PARSE="$PARAM" ;;
+ target_platform) TARGET_PLATFORM="$PARAM" ;;
+ ebpf_enable) EBPF_ENABLE="$PARAM" ;;
+ build_xts) BUILD_XTS="$PARAM" ;;
+ release_test_suite) RELEASE_TEST_SUITE="$PARAM" ;;
+ build_ohos_sdk) BUILD_OHOS_SDK="$PARAM" ;;
+ interface_check) INTERFACE_CHECK="$PARAM" ;;
+ lite_param) LITE_PARAM="$PARAM" ;;
+ sdk_version) SDK_VERSION="$PARAM" ;;
+ hosp_version) HOSP_VERSION="$PARAM" ;;
+ api_version) API_VERSION="$PARAM" ;;
+ release_type) RELEASE_TYPE="$PARAM" ;;
+ meta_version) META_VERSION="$PARAM" ;;
+ build_example) BUILD_EXAMPLE="$PARAM" ;;
+ pycache_enable) PYCACHE_ENABLE="$PARAM" ;;
+ use_naruto) USE_NARUTO="$PARAM" ;;
+ open_source) OPEN_SOURCE="$PARAM" ;;
+ esac
+ shift
+ done
+ COMMAND_ARGS="$@"
+}
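+
+# Illustrative invocation (example values); options are passed as key=value
+# pairs:
+#   parse_cmdline product_name=Hi3516DV300 build_target=make_all variant=release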
diff --git a/dsoftbus/build/core/build_scripts/post_process.sh b/dsoftbus/build/core/build_scripts/post_process.sh
new file mode 100755
index 0000000000000000000000000000000000000000..388a8e21922cbb7e975f48fcf0cf0230d858f3b1
--- /dev/null
+++ b/dsoftbus/build/core/build_scripts/post_process.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+ninja_trace()
+{
+ if [ "${NINJA_START_TIME}x" == "x" ]; then
+ return
+ fi
+    # generate build.trace in TARGET_OUT_DIR.
+ if [ -f "${TARGET_OUT_DIR}"/.ninja_log ]; then
+ if [ -f "${BASE_HOME}"/build/scripts/ninja2trace.py ]; then
+ python3 ${BASE_HOME}/build/scripts/ninja2trace.py --ninja-log ${TARGET_OUT_DIR}/.ninja_log \
+ --trace-file ${TARGET_OUT_DIR}/build.trace --ninja-start-time $NINJA_START_TIME \
+ --duration-file ${TARGET_OUT_DIR}/sorted_action_duration.txt
+ fi
+ fi
+}
+
+calc_build_time()
+{
+ END_TIME=$(date "+%s")
+ log "used: $(expr $END_TIME - $BEGIN_TIME) seconds"
+}
+
+get_build_warning_list()
+{
+ if [ -f "${BASE_HOME}"/build/scripts/get_warnings.py ];then
+ python3 ${BASE_HOME}/build/scripts/get_warnings.py --build-log-file ${TARGET_OUT_DIR}/build.log --warning-out-file ${TARGET_OUT_DIR}/packages/WarningList.txt
+ fi
+}
+
+generate_opensource_package()
+{
+ log "generate opensource package"
+ if [ -f "${BASE_HOME}"/build/scripts/code_release.py ];then
+ python3 "${BASE_HOME}"/build/scripts/code_release.py
+
+ if [ ! -d "${TARGET_OUT_DIR}"/packages/code_opensource ];then
+ mkdir -p "${TARGET_OUT_DIR}"/packages/code_opensource
+ fi
+
+ cp "${BASE_HOME}"/out/Code_Opensource.tar.gz "${TARGET_OUT_DIR}"/packages/code_opensource/Code_Opensource.tar.gz -f
+ fi
+}
+
+ccache_stat()
+{
+ if [[ ! -z "${CCACHE_EXEC}" ]] && [[ ! -z "${USE_CCACHE}" ]] && [[ "${USE_CCACHE}" == 1 ]]; then
+ log "ccache statistics"
+ if [ -e "${LOG_FILE}" -a -e "${CCACHE_LOGFILE}" ]; then
+ python3 ${BASE_HOME}/build/scripts/summary_ccache_hitrate.py $CCACHE_LOGFILE | tee -a $LOG_FILE
+ fi
+ fi
+}
+
+pycache_stat()
+{
+ log "pycache statistics"
+ python3 ${BASE_HOME}/build/scripts/util/pyd.py --stat
+}
+
+pycache_manage()
+{
+ log "manage pycache contents"
+ python3 ${BASE_HOME}/build/scripts/util/pyd.py --manage
+}
+
+pycache_stop()
+{
+ log "pycache daemon exit"
+ python3 ${BASE_HOME}/build/scripts/util/pyd.py --stop
+}
+
+post_process()
+{
+ if [ "${OPEN_SOURCE}" == true ];then
+ generate_opensource_package
+ fi
+
+ calc_build_time
+ pycache_stat
+ pycache_manage
+ pycache_stop
+ ninja_trace
+ ccache_stat
+
+ python3 ${BASE_HOME}/build/ohos/statistics/build_overlap_statistics.py --build-out-dir ${TARGET_OUT_DIR} --subsystem-config-file ${BASE_HOME}/build/subsystem_config.json --root-source-dir ${BASE_HOME}
+ get_build_warning_list
+ echo "post_process"
+}
diff --git a/dsoftbus/build/core/build_scripts/pre_process.sh b/dsoftbus/build/core/build_scripts/pre_process.sh
new file mode 100755
index 0000000000000000000000000000000000000000..8270df959a8e3ff3511e3d6d7bd30554739beef9
--- /dev/null
+++ b/dsoftbus/build/core/build_scripts/pre_process.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+pre_process()
+{
+ echo "pre_process"
+ case $(uname -s) in
+ Darwin)
+ HOST_DIR="darwin-x86"
+ HOST_OS="mac"
+ ;;
+ Linux)
+ HOST_DIR="linux-x86"
+ HOST_OS="linux"
+ ;;
+ *)
+ echo "Unsupported host platform: $(uname -s)"
+ RET=1
+ exit $RET
+ esac
+
+ export PATH=${BASE_HOME}/prebuilts/python/${HOST_DIR}/3.8.5/bin:${BASE_HOME}/prebuilts/build-tools/${HOST_DIR}/bin:$PATH
+ python --version
+
+ source ${BUILD_SCRIPT_DIR}/init_parameters.sh
+ source ${BUILD_SCRIPT_DIR}/parse_cmdline.sh
+ source ${BUILD_SCRIPT_DIR}/common_fun.sh
+ source ${BUILD_SCRIPT_DIR}/trap_ctrlc.sh
+
+ init_parameter "$@"
+ parse_cmdline "$@"
+ # Trap SIGINT
+ trap "trap_ctrlc" 2
+
+ if [ "${PYCACHE_ENABLE}" == true ];then
+ source ${BUILD_SCRIPT_DIR}/set_pycache.sh
+ set_pycache
+ fi
+}
diff --git a/dsoftbus/build/core/build_scripts/set_ccache.sh b/dsoftbus/build/core/build_scripts/set_ccache.sh
new file mode 100755
index 0000000000000000000000000000000000000000..c3181dd3549fa6750a47de29e7e37dfd7127375c
--- /dev/null
+++ b/dsoftbus/build/core/build_scripts/set_ccache.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+function set_ccache() {
+ # user can use environment variable "CCACHE_BASE" to customize ccache directory
+ if [ -z "$CCACHE_BASE" ]; then
+ CCACHE_BASE=${OHOS_ROOT_PATH}/.ccache
+ if [ ! -d "$CCACHE_BASE" ]; then
+ mkdir -p $CCACHE_BASE
+ chmod -R 777 $CCACHE_BASE
+ fi
+ fi
+ echo "CCACHE_DIR="$CCACHE_BASE
+ export USE_CCACHE=1
+ export CCACHE_DIR=$CCACHE_BASE
+ export CCACHE_UMASK=002
+ if [ -f "${OHOS_ROOT_PATH}"/ccache.log ]; then
+ mv ${OHOS_ROOT_PATH}/ccache.log ${OHOS_ROOT_PATH}/ccache.log.old
+ fi
+ export CCACHE_LOGFILE=${OHOS_ROOT_PATH}/ccache.log
+ ${CCACHE_EXEC} -M 50G
+}
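+
+# Illustrative setup (paths are examples only); callers provide CCACHE_EXEC
+# and OHOS_ROOT_PATH before invoking:
+#   export CCACHE_EXEC=/usr/bin/ccache
+#   export OHOS_ROOT_PATH=$(pwd)
+#   set_ccache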
diff --git a/dsoftbus/build/core/build_scripts/set_pycache.sh b/dsoftbus/build/core/build_scripts/set_pycache.sh
new file mode 100755
index 0000000000000000000000000000000000000000..48d16ccf8798643582ba5100c7270f1d865b6fbc
--- /dev/null
+++ b/dsoftbus/build/core/build_scripts/set_pycache.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+function set_pycache()
+{
+ source ${BASE_HOME}/build/config/pycache/pycache.config
+ if [ "${PYCACHE_DIR}"x == x ];then
+ export PYCACHE_DIR=${BASE_HOME}/.pycache
+ else
+ export PYCACHE_DIR=$PYCACHE_DIR
+ fi
+ python3 ${BASE_HOME}/build/scripts/util/pyd.py --root ${PYCACHE_DIR} --start &
+}
diff --git a/dsoftbus/build/core/build_scripts/trap_ctrlc.sh b/dsoftbus/build/core/build_scripts/trap_ctrlc.sh
new file mode 100755
index 0000000000000000000000000000000000000000..e94186b5e7260ca43399e4f83f19197f6c4c76c6
--- /dev/null
+++ b/dsoftbus/build/core/build_scripts/trap_ctrlc.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+function trap_ctrlc()
+{
+ echo "Caught Ctrl+C, doing post build cleaning"
+ source ${BUILD_SCRIPT_DIR}/post_process.sh
+ post_process
+}
diff --git a/dsoftbus/build/core/build_scripts/verify_notice.sh b/dsoftbus/build/core/build_scripts/verify_notice.sh
new file mode 100755
index 0000000000000000000000000000000000000000..3fc71d0ea68aafc5dd431c947c92e3b1640e0a56
--- /dev/null
+++ b/dsoftbus/build/core/build_scripts/verify_notice.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# Author: Tools Team
+# Parameters:
+#   $1 : path of the notice file to be verified.
+#   $2 : file to which the verification result is written.
+#   $3 : the current platform directory.
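+#
+# The script assumes a notice file assembled from blocks of the following
+# shape (the file path and license text are illustrative):
+#   ============================================================
+#   Notices for file(s):
+#   /system/lib/libexample.so
+#   ------------------------------------------------------------
+#   <license text>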
+
+set -e
+
+if [ ! -f "$1" ]; then
+ echo "Note: The notice file $1 does not exist."
+ echo "Success" > $2
+ exit 0
+fi
+
+line_of_equal_in_file="$3/lineOfEqualInFile"
+line_of_new_file_flag_in_file="$3/lineOfNewFileFlagInFile"
+line_of_divide_in_file="$3/lineOfDivideInFile"
+
+rm -f $line_of_equal_in_file
+rm -f $line_of_new_file_flag_in_file
+rm -f $line_of_divide_in_file
+
+NOTICEFILE=$1
+
+grep -n "^============================================================$" $NOTICEFILE | cut -d ':' -f 1 > $line_of_equal_in_file
+grep -n "Notices for file(s):" $NOTICEFILE | cut -d ':' -f 1 > $line_of_new_file_flag_in_file
+grep -n "^------------------------------------------------------------$" $NOTICEFILE | cut -d ':' -f 1 > $line_of_divide_in_file
+
+nums_equal_in_file=$(cat $line_of_equal_in_file | wc -l)
+nums_new_file_flag_in_file=$(cat $line_of_new_file_flag_in_file | wc -l)
+nums_divide_in_file=$(cat $line_of_divide_in_file | wc -l)
+
+if [[ "$nums_equal_in_file" != "$nums_new_file_flag_in_file" ]];then
+ echo "Error: nums_equal_in_file is $nums_equal_in_file, nums_new_file_flag_in_file is $nums_new_file_flag_in_file"
+ echo "Failed" > $2
+ exit 1
+elif [[ "$nums_equal_in_file" != "$nums_divide_in_file" ]];then
+ echo "Warning: counts look inconsistent! nums_equal_in_file is $nums_equal_in_file, nums_divide_in_file is $nums_divide_in_file"
+fi
+
+rm -f $line_of_equal_in_file
+rm -f $line_of_new_file_flag_in_file
+rm -f $line_of_divide_in_file
+
+echo "Success" > $2
+
+set +e
diff --git a/dsoftbus/build/core/gn/BUILD.gn b/dsoftbus/build/core/gn/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..38d3cc964d567148b810aac78a83b65590046808
--- /dev/null
+++ b/dsoftbus/build/core/gn/BUILD.gn
@@ -0,0 +1,126 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/ohos_var.gni")
+
+print("root_out_dir=$root_out_dir")
+print("root_build_dir=$root_build_dir")
+print("root_gen_dir=$root_gen_dir")
+print("current_toolchain=$current_toolchain")
+print("host_toolchain=$host_toolchain")
+
+# Load build configs and write the load result to root_build_dir/build_configs.
+build_loader_script = rebase_path("//build/loader/load.py")
+
+_platforms_config_file = "//out/build_configs/standard_system/platforms.build"
+
+build_platform = ""
+
+_subsystem_config_file = "//out/build_configs/subsystem_config.json"
+arguments = [
+ "--subsystem-config-file",
+ rebase_path(_subsystem_config_file),
+ "--platforms-config-file",
+ rebase_path(_platforms_config_file),
+ "--source-root-dir",
+ rebase_path("//"),
+ "--gn-root-out-dir",
+ rebase_path(root_build_dir),
+ "--target-os",
+ target_os,
+ "--target-cpu",
+ target_cpu,
+]
+
+if (build_platform != "") {
+ arguments += [
+ "--build-platform-name",
+ build_platform,
+ ]
+}
+
+if (scalable_build) {
+ arguments += [ "--scalable-build" ]
+}
+
+if (build_example) {
+ arguments += [
+ "--example-subsystem-file",
+ rebase_path("//build/subsystem_config_example.json", "$root_out_dir"),
+ ]
+}
+if (build_xts) {
+ arguments += [ "--build-xts" ]
+}
+
+arguments += [
+ "--ignore-api-check",
+ "xts",
+ "common",
+ "subsystem_examples",
+]
+
+load_result = exec_script(build_loader_script, arguments, "string")
+
+if (load_result != "") {
+ print()
+ print(load_result)
+}
+
+print("build configs generation is complete.")
+
+# gn target defined
+if (product_name == "ohos-sdk") {
+ group("build_ohos_sdk") {
+ deps = [ "//build/ohos/sdk:ohos_sdk" ]
+ }
+} else {
+ group("make_all") {
+ deps = [
+ ":images",
+ ":make_inner_kits",
+ ":packages",
+ ]
+ }
+
+ group("images") {
+ deps = [ "//build/ohos/images:make_images" ]
+ }
+
+ group("packages") {
+ deps = [ "//build/ohos/packages:make_packages" ]
+ }
+
+ group("make_inner_kits") {
+ deps = [ "$root_build_dir/build_configs:inner_kits" ]
+ }
+
+ group("build_all_test_pkg") {
+ testonly = true
+ deps = [
+ "$root_build_dir/build_configs:parts_test",
+ "//test/developertest:make_temp_test",
+ ]
+ }
+
+ group("make_test") {
+ testonly = true
+ deps = [ ":build_all_test_pkg" ]
+ deps += [ "//build/ohos/packages:package_testcase_mlf" ]
+ deps += [ "//build/ohos/packages:package_testcase" ]
+ if (archive_component) {
+ deps += [ "//build/ohos/testfwk:archive_testcase" ]
+ }
+ }
+}
diff --git a/dsoftbus/build/core/gn/dotfile.gn b/dsoftbus/build/core/gn/dotfile.gn
new file mode 100755
index 0000000000000000000000000000000000000000..6f97447fd8bdc97104ae67a9b4ba58c78c6e6a07
--- /dev/null
+++ b/dsoftbus/build/core/gn/dotfile.gn
@@ -0,0 +1,22 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# The location of the build configuration file.
+buildconfig = "//build/config/BUILDCONFIG.gn"
+
+# The source root location.
+root = "//build/core/gn"
+
+# The executable used to execute scripts in action and exec_script.
+script_executable = "/usr/bin/env"
diff --git a/dsoftbus/build/docs/how-to-build-a-hap.md b/dsoftbus/build/docs/how-to-build-a-hap.md
new file mode 100644
index 0000000000000000000000000000000000000000..d5bf69dcbcfbad14e313f98393123c7aff1563d1
--- /dev/null
+++ b/dsoftbus/build/docs/how-to-build-a-hap.md
@@ -0,0 +1,111 @@
+# Building a HAP
+
+## Terminology
+
+GN target: a buildable unit declared in a BUILD.gn file, referenced by a label such as //path/to/module:target_name.
+
+## Contents of a HAP
+
+An L2 (standard-system) HAP consists of resources, raw assets, JS assets, native libraries, config.json, and other parts.
+
+## Templates provided by the build system
+
+The build system provides four templates for building HAPs.
+
+The templates are integrated into ohos.gni; import build/ohos.gni before using them.
+
+### ohos_resources
+
+- Declares a resource target. After the target is compiled by restool, an index file is generated; the HAP packages the resource source files together with the index file.
+- The target also generates the compiled ResourceTable.h; depending on the target directly makes that header available.
+- The name of a resource target must end with "resources", "resource", or "res"; otherwise the build check reports an error.
+- Supported variables:
+  1. sources: resource paths; a list, so multiple paths may be given
+  2. hap_profile: the config.json of the corresponding HAP, needed when compiling resources
+  3. deps: dependencies of this target; optional
+
+### ohos_assets
+
+- Declares an assets target
+- Mind the spelling: assets, not assert
+- The name of an assets target must end with "assets" or "asset"
+- Supported variables:
+  1. sources: paths of the raw assets; a list, so multiple paths may be given
+  2. deps: dependencies of this target; optional
+
+### ohos_js_assets
+
+- Declares a JS assets target; JS assets are the executable part of an L2 HAP
+- The name of a JS assets target must end with "assets" or "asset"
+- Supported variables:
+  1. source_dir: path of the JS assets; a string, so only one path may be given
+  2. deps: dependencies of this target; optional
+
+### ohos_hap
+
+- Declares a HAP target, which produces a HAP that is eventually packaged into the system image
+
+- Supported variables:
+
+  1. hap_profile: the config.json of the HAP
+
+  2. deps: dependencies of this target
+
+  3. shared_libraries: native libraries this target depends on
+
+  4. hap_name: name of the HAP; optional, defaults to the target name
+
+  5. final_hap_path: lets the user specify where the generated HAP is placed; optional, and final_hap_path overrides hap_name
+
+  6. subsystem_name: name of the subsystem the HAP belongs to; it must match the name in ohos.build, otherwise the HAP cannot be installed into the system image
+
+  7. part_name: name of the part the HAP belongs to; the same requirement as for subsystem_name applies
+
+  8. js2abc: whether the HAP should be converted to ARK bytecode
+
+  For signing configuration, see: https://gitee.com/openharmony/docs/blob/master/zh-cn/application-dev/quick-start/%E9%85%8D%E7%BD%AEOpenHarmony%E5%BA%94%E7%94%A8%E7%AD%BE%E5%90%8D%E4%BF%A1%E6%81%AF.md
+
+  9. certificate_profile: the provisioning profile of the HAP, used for signing
+
+  10. certificate_file: the certificate file; application developers apply for the certificate and the provisioning profile on the OpenHarmony official website
+
+  11. keystore_path: the keystore file, used for signing
+
+  12. keystore_password: the keystore password, used for signing
+
+  13. key_alias: alias of the signing key
+
+  14. module_install_name: the HAP name used at installation time
+
+  15. module_install_dir: the install location in the system image; defaults to the system/app directory
+
+## An example
+
+```gn
+import("//build/ohos.gni") # import ohos.gni
+
+ohos_hap("clock") {
+  hap_profile = "./src/main/config.json" # config.json
+  deps = [
+    ":clock_js_assets", # JS assets
+    ":clock_resources", # resources
+  ]
+  shared_libraries = [
+    "//third_party/libpng:libpng", # native library
+  ]
+  certificate_profile = "../signature/systemui.p7b" # certificate profile
+  hap_name = "SystemUI-NavigationBar" # HAP name
+  part_name = "prebuilt_hap"
+  subsystem_name = "applications"
+}
+ohos_js_assets("clock_js_assets") {
+  source_dir = "./src/main/js/default"
+}
+ohos_resources("clock_resources") {
+  sources = [ "./src/main/resources" ]
+  hap_profile = "./src/main/config.json"
+}
+```
+
diff --git "a/dsoftbus/build/docs/\345\205\263\344\272\216deps\345\217\212external_deps\347\232\204\344\275\277\347\224\250.md" "b/dsoftbus/build/docs/\345\205\263\344\272\216deps\345\217\212external_deps\347\232\204\344\275\277\347\224\250.md"
new file mode 100755
index 0000000000000000000000000000000000000000..ce81ac7c0269dfb67230cda61466d91a5232dc94
--- /dev/null
+++ "b/dsoftbus/build/docs/\345\205\263\344\272\216deps\345\217\212external_deps\347\232\204\344\275\277\347\224\250.md"
@@ -0,0 +1,86 @@
+## Using deps and external_deps
+
+When adding a module, its dependencies must be declared in BUILD.gn. To make inter-part dependency handling easier later on, dependencies are divided into two kinds: intra-part dependencies (deps) and inter-part dependencies (external_deps).
+
+### Dependency classification:
+
+**Intra-part dependency:** an existing module module1 belongs to part part1, and a new module module2, which also belongs to part1, depends on module1. This is an intra-part dependency.
+
+**Inter-part dependency:** an existing module module1 belongs to part part1, and a new module module2, which belongs to part part2, depends on module1. module2 and module1 belong to two different parts, so this is an inter-part dependency.
+
+Use deps for intra-part dependencies and external_deps for inter-part dependencies.
+
+### Examples:
+
+**Intra-part dependency example:**
+
+```gn
+import("//build/ohos.gni")
+ohos_shared_library("module1") {
+  ……
+  part_name = "part1" # required: name of the part this module belongs to
+}
+```
+
+```
+import("//build/ohos.gni")
+ohos_shared_library("module2") {
+  ……
+  deps = [
+    "gn target of module1",
+    ……
+  ] # intra-part module dependencies
+  part_name = "part1" # required: name of the part this module belongs to
+}
+```
+
+An intra-part dependency works like any ordinary dependency.
+
+**Inter-part dependency example:**
+
+```
+import("//build/ohos.gni")
+ohos_shared_library("module1") {
+  ……
+  part_name = "part1" # required: name of the part this module belongs to
+}
+```
+
+The ohos.build file of the part module1 belongs to:
+
+```
+{
+  "subsystem":"subsystem name",
+  "parts": {
+    "part1": {
+      "inner_kits": [
+        {
+          "header": {
+            "header_base": "directory of the headers", # directory of the headers
+            "header_files": [
+              "header file name"
+            ] # list of header file names
+          },
+          "name": "gn target of module1"
+        },
+      ],
+      ……
+    }
+  }
+}
+```
+
+```
+import("//build/ohos.gni")
+ohos_shared_library("module2") {
+  ……
+  external_deps = [
+    "part1:module1",
+    ……
+  ] # inter-part module dependencies; the depended-on module must be declared in inner_kits of the depended-on part
+  part_name = "part2" # required: name of the part this module belongs to
+}
+```
+
+Note: inter-part dependencies must be written in external\_deps in the form "part name:module name", and the depended-on module must be one declared in inner_kits of the depended-on part.
+
diff --git "a/dsoftbus/build/docs/\346\240\207\345\207\206\347\263\273\347\273\237\345\246\202\344\275\225\346\267\273\345\212\240\344\270\200\344\270\252\346\250\241\345\235\227.md" "b/dsoftbus/build/docs/\346\240\207\345\207\206\347\263\273\347\273\237\345\246\202\344\275\225\346\267\273\345\212\240\344\270\200\344\270\252\346\250\241\345\235\227.md"
new file mode 100755
index 0000000000000000000000000000000000000000..ad75f81c7f374d804aee201048b06eceb12a2fc2
--- /dev/null
+++ "b/dsoftbus/build/docs/\346\240\207\345\207\206\347\263\273\347\273\237\345\246\202\344\275\225\346\267\273\345\212\240\344\270\200\344\270\252\346\250\241\345\235\227.md"
@@ -0,0 +1,256 @@
+# Adding a module to the standard system
+
+A module to be added falls into one of the following three cases, each requiring a different amount of change to the existing configuration files.
+
++ Add a module to an existing part
++ Create a new part and add the module to it
++ Create a new subsystem and add the module under a part of that subsystem
+
+## Add a module to an existing part
+
+1. Configure BUILD.gn in the module directory and choose the template matching the module type.
+
+   **Supported template types:**
+
+   ```
+   ohos_executable
+   ohos_shared_library
+   ohos_static_library
+   ohos_source_set
+
+   # Prebuilt templates:
+   ohos_prebuilt_executable
+   ohos_prebuilt_shared_library
+   ohos_prebuilt_etc
+   ```
+
+   **Examples:**
+
+   *ohos_shared_library example*
+
+   ```
+   import("//build/ohos.gni")
+   ohos_shared_library("helloworld") {
+     sources = []
+     include_dirs = []
+     cflags = []
+     cflags_c = []
+     cflags_cc = []
+     ldflags = []
+     configs = []
+     deps = [] # intra-part module dependencies
+
+     # Inter-part module dependencies,
+     # in the form "part_name:module_name";
+     # the depended-on module must be declared in inner_kits of the depended-on part
+     external_deps = [
+       "part_name:module_name",
+     ]
+
+     output_name = "" # optional: output name of the module
+     output_extension = "" # optional: suffix of the module name
+     module_install_dir = "" # optional: install path, specified starting after system/ or vendor/; defaults to /system/lib64 or /system/lib
+     relative_install_dir = "" # optional: install path relative to /system/lib64 or /system/lib; ignored when module_install_dir is set
+
+     part_name = "" # required: name of the part this module belongs to
+   }
+   ```
+
+   _ohos\_executable example:_
+
+   The ohos\_executable template properties are basically the same as those of ohos\_shared\_library.
+
+   _Note: executable modules (those defined with the ohos\_executable template) are not installed by default; to install one, set install\_enable = true._
+
+   _ohos\_prebuilt\_etc example:_
+
+   ```
+   import("//build/ohos.gni")
+   ohos_prebuilt_etc("etc_file") {
+     source = "file"
+     deps = [] # intra-part module dependencies
+     module_install_dir = "" # optional: install path, specified starting after system/ or vendor/
+     relative_install_dir = "" # optional: install path relative to system/etc; ignored when module_install_dir is set
+     part_name = "" # required: name of the part this module belongs to
+   }
+   ```
+
+2. Modify the ohos.build configuration file of the part that contains the module.
+
+   Below is an example ohos.build file:
+
+   ```build
+   {
+     "subsystem": "ace",
+     "parts": {
+       "napi": {
+         "module_list": [
+           "//foundation/ace/napi:napi_packages"
+         ],
+         "inner_kits": [
+         ],
+         "test_list": [
+           "//foundation/ace/napi:napi_packages_test",
+           "//foundation/ace/napi/test/unittest:unittest"
+         ]
+       }
+     }
+   }
+   ```
+
+   **File description:**
+
+   subsystem defines the subsystem name; parts defines the parts the subsystem contains.
+
+   A part consists of its name, the modules it contains (module_list), the interfaces it exposes to other parts (inner_kits), and its test cases (test_list).
+
+   **What to change:**
+
+   To add a module to an existing part, simply add the module's GN build target to the part's module_list; if the module exposes interfaces to other modules, add the corresponding entries to inner_kits; if the module has test cases, add them to test_list.
+
+## Create a new part and add the module to it
+
+1. Configure BUILD.gn in the module directory and choose the template matching the module type.
+
+   This step is essentially the same as adding a module to an existing part; just note that part_name in the module's BUILD.gn must be the name of the new part.
+
+2. Modify or create an ohos.build configuration file.
+
+   ```build
+   {
+     "subsystem": "subsystem name",
+     "parts": {
+       "new part name": {
+         "module_list": [
+           "gn targets of the modules in the part"
+         ],
+         "inner_kits": [
+         ],
+         "test_list": [
+           "test cases",
+         ]
+       }
+     }
+   }
+   ```
+
+   There are two ways to add a new part to an existing subsystem: add the part to the subsystem's existing ohos.build file, or create a new ohos.build file. Either way, the ohos.build file lives in the folder of the corresponding subsystem.
+
+   An ohos.build file has two sections: subsystem gives the subsystem name, and parts defines the parts the subsystem contains. To add a part, add its content under parts. The part must declare the modules it contains in module_list; interfaces exposed to other parts go in inner_kits, and test cases go in test_list; inner_kits and test_list may be omitted if there are none.
+
+3. Add the part to the product configuration file (JSON format) under the productdefine/common/products directory, right after the existing parts.
+
+   ```json
+   {
+     "parts":{
+       "subsystem name of the part:part name":{}
+     }
+   }
+   ```
+
+## Create a new subsystem and add the module under a part of that subsystem
+
+1. Configure BUILD.gn in the module directory and choose the template matching the module type. This step is the same as for creating a new part.
+
+2. Create an ohos.build file in the folder of each part under the new subsystem directory to define the part information. This step is the same as for creating a new part.
+
+3. Modify the subsystem_config.json file in the build directory.
+
+   ```json
+   {
+     "subsystem name": {
+       "path": "subsystem directory",
+       "name": "subsystem name",
+       ...
+     }
+   }
+   ```
+
+   This file defines which subsystems exist and the folder paths they live in. When adding a subsystem, specify the subsystem's path and name, meaning the subsystem directory and the subsystem name respectively.
+
+4. Add the part to the product configuration, e.g. Hi3516DV300.json under the productdefine/common/products directory, right after the existing parts.
+
+   ```json
+   {
+     ...
+     "parts":{
+       "subsystem name of the part:part name":{}
+     }
+   }
+   ```
+
+   **Verifying the addition:**
+
+   + In the output folder, the BUILD.gn file under the corresponding subsystem's part folder has a module_list that contains the target defined in the new module's BUILD.gn.
+   + After the build completes, the module is packaged into the image, producing the corresponding .so or binary file.
+
+## Configuration files
+
+The HarmonyOS build relies on four main configuration files:
+
+1. The <product name>.json file under the productdefine/common/products directory
+
+   ```json
+   {
+     "product_name": "Hi3516DV300",
+     "product_company": "hisilicon",
+     "product_device": "hi3516dv300",
+     "version": "2.0",
+     "type": "standard",
+     "parts":{
+       "ace:ace_engine_standard":{},
+       "ace:napi":{},
+       "account:os_account_standard":{},
+       "distributeddatamgr:native_appdatamgr":{},
+       "distributeddatamgr:distributeddatamgr":{},
+       "distributeddatamgr:appdatamgr_jskits":{}
+     }
+   }
+   ```
+
+   It specifies the product name, vendor, device, version, the type of system to build, and the parts the product contains.
+
+2. The subsystem_config.json file in the build directory
+
+   ```json
+   {
+     "ace": {
+       "project": "hmf/ace",
+       "path": "foundation/ace",
+       "name": "ace",
+       "dir": "foundation"
+     }
+   }
+   ```
+
+   This file describes the subsystems. What matters here are name and path in each subsystem definition, which give the subsystem's name and folder path.
+
+3. The ohos.build files inside subsystems
+
+   ```build
+   {
+     "subsystem": "ace",
+     "parts": {
+       "napi": {
+         "module_list": [
+           "//foundation/ace/napi:napi_packages"
+         ],
+         "inner_kits": [
+         ],
+         "test_list": [
+           "//foundation/ace/napi:napi_packages_test",
+           "//foundation/ace/napi/test/unittest:unittest"
+         ]
+       }
+     }
+   }
+   ```
+
+   An ohos.build file defines the parts a subsystem contains.
+
+   Each part defines the module targets it contains (module_list), the interfaces used between parts (inner_kits), and its test cases (test_list). module_list is mandatory.
+
+4. The BUILD.gn file of each module
+
+   It may use the provided templates, or be written by hand following GN syntax rules.
+
diff --git "a/dsoftbus/build/docs/\347\274\226\350\257\221\346\211\253\346\217\217\350\257\264\346\230\216.md" "b/dsoftbus/build/docs/\347\274\226\350\257\221\346\211\253\346\217\217\350\257\264\346\230\216.md"
new file mode 100755
index 0000000000000000000000000000000000000000..27553f2acc64dd1c57fd25f309ef9f1c30540b97
--- /dev/null
+++ "b/dsoftbus/build/docs/\347\274\226\350\257\221\346\211\253\346\217\217\350\257\264\346\230\216.md"
@@ -0,0 +1,10 @@
+# Build scanning
+
+During a build, the relevant subsystem configuration files are scanned. They comprise the following three files, which define subsystem names and paths.
+
+1. The subsystem_config.json file in the build folder mainly contains subsystem names and path information. It is loaded in the preloader phase; the ohos.build file under each path is then located from the subsystem name and path information.
+2. The product configuration file, under the productdefine/common/products folder and named {product name}.json, is loaded in the preloader phase. It mainly contains the product name, vendor, device name, product type, the product's subsystem path, and the parts the product contains. To configure a product-specific subsystem, add product_build_path to this file to indicate the product subsystem directory; the preloader phase then loads the subsystem part configuration from that directory.
+3. The device configuration file, under the productdefine/common/device folder and named {device name}.json, is loaded in the preloader phase. It mainly contains the device name, vendor, the device's target os and target cpu, and the device's subsystem path. To configure a device-specific subsystem, add device_build_path to this file to indicate the device subsystem directory; the preloader phase then loads the subsystem part configuration from that directory.
+
+Product- and device-specific subsystem paths can now be declared in the product and device configurations, and scanning picks up the subsystem part configuration under those directories; a minimal sketch follows.
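+
+A minimal product-config sketch showing where product_build_path sits (all field values here are illustrative, not taken from a real product):
+
+```json
+{
+  "product_name": "MyProduct",
+  "product_device": "mydevice",
+  "type": "standard",
+  "product_build_path": "vendor/myvendor/myproduct",
+  "parts": {}
+}
+```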
+
diff --git a/dsoftbus/build/gn_helpers.py b/dsoftbus/build/gn_helpers.py
new file mode 100755
index 0000000000000000000000000000000000000000..bec6b13fd6125613ca2cace91fab4cd6812c858d
--- /dev/null
+++ b/dsoftbus/build/gn_helpers.py
@@ -0,0 +1,359 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions useful when writing scripts that integrate with GN.
+
+The main functions are ToGNString and FromGNString which convert between
+serialized GN variables and Python variables.
+
+To use in a random python file in the build:
+
+ import os
+ import sys
+
+ sys.path.append(os.path.join(os.path.dirname(__file__),
+ os.pardir, os.pardir, "build"))
+ import gn_helpers
+
+Where the sequence of parameters to join is the relative path from your source
+file to the build directory.
+"""
+
+
+class GNException(Exception):
+ pass
+
+
+def ToGNString(value, allow_dicts=True):
+ """Returns a stringified GN equivalent of the Python value.
+
+ allow_dicts indicates if this function will allow converting dictionaries
+ to GN scopes. This is only possible at the top level, you can't nest a
+ GN scope in a list, so this should be set to False for recursive calls."""
+ if isinstance(value, str):
+ if value.find('\n') >= 0:
+ raise GNException("Trying to print a string with a newline in it.")
+ return '"' + \
+ value.replace('\\', '\\\\').replace('"', '\\"').replace('$', '\\$') + \
+ '"'
+
+
+ if isinstance(value, bool):
+ if value:
+ return "true"
+ return "false"
+
+ if isinstance(value, list):
+ return '[ %s ]' % ', '.join(ToGNString(v) for v in value)
+
+ if isinstance(value, dict):
+ if not allow_dicts:
+ raise GNException("Attempting to recursively print a dictionary.")
+ result = ""
+ for key in sorted(value):
+ if not isinstance(key, str):
+ raise GNException("Dictionary key is not a string.")
+ result += "%s = %s\n" % (key, ToGNString(value[key], False))
+ return result
+
+ if isinstance(value, int):
+ return str(value)
+
+ raise GNException("Unsupported type when printing to GN.")
+
+
+def FromGNString(input_string):
+ """Converts the input string from a GN serialized value to Python values.
+
+ For details on supported types see GNValueParser.Parse() below.
+
+ If your GN script did:
+ something = [ "file1", "file2" ]
+ args = [ "--values=$something" ]
+ The command line would look something like:
+ --values="[ \"file1\", \"file2\" ]"
+ Which when interpreted as a command line gives the value:
+ [ "file1", "file2" ]
+
+ You can parse this into a Python list using GN rules with:
+    input_values = FromGNString(options.values)
+ Although the Python 'ast' module will parse many forms of such input, it
+ will not handle GN escaping properly, nor GN booleans. You should use this
+ function instead.
+
+
+ A NOTE ON STRING HANDLING:
+
+ If you just pass a string on the command line to your Python script, or use
+ string interpolation on a string variable, the strings will not be quoted:
+ str = "asdf"
+ args = [ str, "--value=$str" ]
+ Will yield the command line:
+ asdf --value=asdf
+ The unquoted asdf string will not be valid input to this function, which
+ accepts only quoted strings like GN scripts. In such cases, you can just use
+ the Python string literal directly.
+
+  The main use case for this is other types, in particular lists. When
+ using string interpolation on a list (as in the top example) the embedded
+ strings will be quoted and escaped according to GN rules so the list can be
+ re-parsed to get the same result.
+ """
+ parser = GNValueParser(input_string)
+ return parser.Parse()
+
+
+def FromGNArgs(input_string):
+ """Converts a string with a bunch of gn arg assignments into a Python dict.
+
+  Given a whitespace-separated list of
+
+    <ident> = (integer | string | boolean | <list of the former>)
+
+ gn assignments, this returns a Python dict, i.e.:
+
+ FromGNArgs("foo=true\nbar=1\n") -> { 'foo': True, 'bar': 1 }.
+
+ Only simple types and lists supported; variables, structs, calls
+ and other, more complicated things are not.
+
+ This routine is meant to handle only the simple sorts of values that
+ arise in parsing --args.
+ """
+ parser = GNValueParser(input_string)
+ return parser.ParseArgs()
+
+
+def UnescapeGNString(value):
+ """Given a string with GN escaping, returns the unescaped string.
+
+ Be careful not to feed with input from a Python parsing function like
+ 'ast' because it will do Python unescaping, which will be incorrect when
+ fed into the GN unescaper."""
+ result = ''
+ i = 0
+ while i < len(value):
+ if value[i] == '\\':
+ if i < len(value) - 1:
+ next_char = value[i + 1]
+ if next_char in ('$', '"', '\\'):
+ # These are the escaped characters GN supports.
+ result += next_char
+ i += 1
+ else:
+ # Any other backslash is a literal.
+ result += '\\'
+ else:
+ result += value[i]
+ i += 1
+ return result
+
+
+def _IsDigitOrMinus(char):
+ return char in "-0123456789"
+
+
+class GNValueParser(object):
+ """Duplicates GN parsing of values and converts to Python types.
+
+  Normally you would use the wrapper functions FromGNString() and FromGNArgs() above.
+
+ If you expect input as a specific type, you can also call one of the Parse*
+ functions directly. All functions throw GNException on invalid input.
+ """
+
+ def __init__(self, string):
+ self.input = string
+ self.cur = 0
+
+ def IsDone(self):
+ return self.cur == len(self.input)
+
+ def ConsumeWhitespace(self):
+ while not self.IsDone() and self.input[self.cur] in ' \t\n':
+ self.cur += 1
+
+ def Parse(self):
+ """Converts a string representing a printed GN value to the Python type.
+
+ See additional usage notes on FromGNString above.
+
+ - GN booleans ('true', 'false') will be converted to Python booleans.
+
+ - GN numbers ('123') will be converted to Python numbers.
+
+ - GN strings (double-quoted as in '"asdf"') will be converted to Python
+ strings with GN escaping rules. GN string interpolation (embedded
+ variables preceded by $) are not supported and will be returned as
+ literals.
+
+ - GN lists ('[1, "asdf", 3]') will be converted to Python lists.
+
+ - GN scopes ('{ ... }') are not supported.
+ """
+ result = self._ParseAllowTrailing()
+ self.ConsumeWhitespace()
+ if not self.IsDone():
+ raise GNException("Trailing input after parsing:\n " +
+ self.input[self.cur:])
+ return result
+
+ def ParseArgs(self):
+ """Converts a whitespace-separated list of ident=literals to a dict.
+
+ See additional usage notes on FromGNArgs, above.
+ """
+ d = {}
+
+ self.ConsumeWhitespace()
+ while not self.IsDone():
+ ident = self._ParseIdent()
+ self.ConsumeWhitespace()
+ if self.input[self.cur] != '=':
+ raise GNException("Unexpected token: " + self.input[self.cur:])
+ self.cur += 1
+ self.ConsumeWhitespace()
+ val = self._ParseAllowTrailing()
+ self.ConsumeWhitespace()
+ d[ident] = val
+
+ return d
+
+ def _ParseAllowTrailing(self):
+ """Internal version of Parse that doesn't check for trailing stuff."""
+ self.ConsumeWhitespace()
+ if self.IsDone():
+ raise GNException("Expected input to parse.")
+
+ next_char = self.input[self.cur]
+ if next_char == '[':
+ return self.ParseList()
+ elif _IsDigitOrMinus(next_char):
+ return self.ParseNumber()
+ elif next_char == '"':
+ return self.ParseString()
+ elif self._ConstantFollows('true'):
+ return True
+ elif self._ConstantFollows('false'):
+ return False
+ else:
+ raise GNException("Unexpected token: " + self.input[self.cur:])
+
+ def _ParseIdent(self):
+ ident = ''
+
+ next_char = self.input[self.cur]
+ if not next_char.isalpha() and not next_char == '_':
+ raise GNException("Expected an identifier: " + self.input[self.cur:])
+
+ ident += next_char
+ self.cur += 1
+
+ next_char = self.input[self.cur]
+ while next_char.isalpha() or next_char.isdigit() or next_char == '_':
+ ident += next_char
+ self.cur += 1
+ next_char = self.input[self.cur]
+
+ return ident
+
+ def ParseNumber(self):
+ self.ConsumeWhitespace()
+ if self.IsDone():
+ raise GNException('Expected number but got nothing.')
+
+ begin = self.cur
+
+ # The first character can include a negative sign.
+ if not self.IsDone() and _IsDigitOrMinus(self.input[self.cur]):
+ self.cur += 1
+ while not self.IsDone() and self.input[self.cur].isdigit():
+ self.cur += 1
+
+ number_string = self.input[begin:self.cur]
+ if not len(number_string) or number_string == '-':
+ raise GNException("Not a valid number.")
+ return int(number_string)
+
+ def ParseString(self):
+ self.ConsumeWhitespace()
+ if self.IsDone():
+ raise GNException('Expected string but got nothing.')
+
+ if self.input[self.cur] != '"':
+ raise GNException('Expected string beginning in a " but got:\n ' +
+ self.input[self.cur:])
+ self.cur += 1 # Skip over quote.
+
+ begin = self.cur
+ while not self.IsDone() and self.input[self.cur] != '"':
+ if self.input[self.cur] == '\\':
+ self.cur += 1 # Skip over the backslash.
+ if self.IsDone():
+ raise GNException("String ends in a backslash in:\n " +
+ self.input)
+ self.cur += 1
+
+ if self.IsDone():
+ raise GNException('Unterminated string:\n ' + self.input[begin:])
+
+ end = self.cur
+ self.cur += 1 # Consume trailing ".
+
+ return UnescapeGNString(self.input[begin:end])
+
+ def ParseList(self):
+ self.ConsumeWhitespace()
+ if self.IsDone():
+ raise GNException('Expected list but got nothing.')
+
+ # Skip over opening '['.
+ if self.input[self.cur] != '[':
+ raise GNException("Expected [ for list but got:\n " +
+ self.input[self.cur:])
+ self.cur += 1
+ self.ConsumeWhitespace()
+ if self.IsDone():
+ raise GNException("Unterminated list:\n " + self.input)
+
+ list_result = []
+ previous_had_trailing_comma = True
+ while not self.IsDone():
+ if self.input[self.cur] == ']':
+ self.cur += 1 # Skip over ']'.
+ return list_result
+
+ if not previous_had_trailing_comma:
+ raise GNException("List items not separated by comma.")
+
+ list_result += [self._ParseAllowTrailing()]
+ self.ConsumeWhitespace()
+ if self.IsDone():
+ break
+
+ # Consume comma if there is one.
+ previous_had_trailing_comma = self.input[self.cur] == ','
+ if previous_had_trailing_comma:
+ # Consume comma.
+ self.cur += 1
+ self.ConsumeWhitespace()
+
+ raise GNException("Unterminated list:\n " + self.input)
+
+ def _ConstantFollows(self, constant):
+ """Returns true if the given constant follows immediately at the current
+ location in the input. If it does, the text is consumed and the function
+ returns true. Otherwise, returns false and the current position is
+ unchanged."""
+ end = self.cur + len(constant)
+ if end > len(self.input):
+ return False # Not enough room.
+ if self.input[self.cur:end] == constant:
+ self.cur = end
+ return True
+ return False
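+
+
+# Illustrative round trip (example values):
+#   ToGNString(['a', True, 3]) -> '[ "a", true, 3 ]'
+#   FromGNString('[ "a", true, 3 ]') -> ['a', True, 3]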
diff --git a/dsoftbus/build/loader/__init__.py b/dsoftbus/build/loader/__init__.py
new file mode 100755
index 0000000000000000000000000000000000000000..d7b3a083706fb60581a81c2a917e927139f61f7f
--- /dev/null
+++ b/dsoftbus/build/loader/__init__.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/dsoftbus/build/loader/build_gn.template b/dsoftbus/build/loader/build_gn.template
new file mode 100755
index 0000000000000000000000000000000000000000..b8bafdec5a1f43fefeb1a745b68e64a0bc16988f
--- /dev/null
+++ b/dsoftbus/build/loader/build_gn.template
@@ -0,0 +1,38 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("${root_build_dir}/build_configs/parts_list.gni")
+import("${root_build_dir}/build_configs/inner_kits_list.gni")
+import("${root_build_dir}/build_configs/system_kits_list.gni")
+import("${root_build_dir}/build_configs/parts_test_list.gni")
+
+group("parts_list") {
+ deps = parts_list
+}
+
+group("inner_kits") {
+ deps = inner_kits_list
+}
+
+group("system_kits") {
+ deps = system_kits_list
+}
+
+group("parts_test") {
+ testonly = true
+ deps = parts_test_list
+}
+
+group("phony_list") {
+ deps = [ "phony_targets:part_phony_targets" ]
+}
\ No newline at end of file
diff --git a/dsoftbus/build/loader/generate_targets_gn.py b/dsoftbus/build/loader/generate_targets_gn.py
new file mode 100755
index 0000000000000000000000000000000000000000..5d230b6c81dc8b16c7002bab336b43c14b3cd9ef
--- /dev/null
+++ b/dsoftbus/build/loader/generate_targets_gn.py
@@ -0,0 +1,203 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import sys
+import shutil
+
+_SOURCE_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..'))
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1, os.path.join(_SOURCE_ROOT, 'third_party'))
+from jinja2 import Template # noqa: E402
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from scripts.util.file_utils import write_file # noqa: E402
+
+parts_list_gni_template = """
+parts_list = [
+ {}
+]
+"""
+
+inner_kits_gni_template = """
+inner_kits_list = [
+ {}
+]
+"""
+
+system_kits_gni_template = """
+system_kits_list = [
+ {}
+]
+"""
+
+parts_test_gni_template = """
+parts_test_list = [
+ {}
+]
+"""
+
+phony_target_list_template = """
+group("part_phony_targets") {{
+ deps = [
+ {}
+ ]
+}}"""
+
+phony_group_template = """
+group("{}_phony") {{
+ deps = [ "{}" ]
+}}"""
+
+gn_file_template = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'build_gn.template')
+
+
+def gen_targets_gn(parts_targets, config_output_dir):
+ parts_list = []
+ inner_kits_list = []
+ system_kits_list = []
+ parts_test_list = []
+ phony_target_list = []
+ for part_labels in parts_targets.values():
+ parts_list.append(part_labels.get('part'))
+ if 'phony' in part_labels:
+ phony_target_list.append(part_labels.get('phony'))
+ if 'inner_kits' in part_labels:
+ inner_kits_list.append(part_labels.get('inner_kits'))
+ if 'system_kits' in part_labels:
+ system_kits_list.append(part_labels.get('system_kits'))
+ if 'test' in part_labels:
+ parts_test_list.append(part_labels.get('test'))
+ parts_list_gni_file = os.path.join(config_output_dir, 'parts_list.gni')
+ parts_list_content = '"{}",'.format('",\n "'.join(parts_list))
+ write_file(parts_list_gni_file,
+ parts_list_gni_template.format(parts_list_content))
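+ # Illustrative parts_list.gni content (hypothetical part label):
+ #   parts_list = [
+ #     "//out/build_configs/subsys/foo:foo(//build/toolchain/ohos:ohos_clang_arm64)",
+ #   ]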
+
+ inner_kits_gni_file = os.path.join(config_output_dir,
+ 'inner_kits_list.gni')
+ if inner_kits_list:
+ inner_kits_content = '"{}",'.format('",\n "'.join(inner_kits_list))
+ else:
+ inner_kits_content = ''
+ write_file(inner_kits_gni_file,
+ inner_kits_gni_template.format(inner_kits_content))
+
+ system_list_gni_file = os.path.join(config_output_dir,
+ 'system_kits_list.gni')
+ if system_kits_list:
+ system_kits_content = '"{}",'.format('",\n "'.join(system_kits_list))
+ else:
+ system_kits_content = ''
+ write_file(system_list_gni_file,
+ system_kits_gni_template.format(system_kits_content))
+
+ parts_test_gni_file = os.path.join(config_output_dir,
+ 'parts_test_list.gni')
+ if parts_test_list:
+ test_list_content = '"{}",'.format('",\n "'.join(parts_test_list))
+ else:
+ test_list_content = ''
+ write_file(parts_test_gni_file,
+ parts_test_gni_template.format(test_list_content))
+
+ build_gn_file = os.path.join(config_output_dir, 'BUILD.gn')
+ shutil.copyfile(gn_file_template, build_gn_file)
+
+
+def gen_phony_targets(variant_phony_targets, config_output_dir):
+ phony_target_list = []
+ phony_group_list = []
+ for part_name, part_label in variant_phony_targets.items():
+ phony_target_list.append('{}_phony'.format(part_name))
+ phony_group_list.append(
+ phony_group_template.format(part_name, part_label))
+
+ phony_list_content = ''
+ if phony_target_list:
+ phony_list_content = '":{}",'.format(
+ '",\n ":'.join(phony_target_list))
+ phony_build_content = []
+ phony_build_content.append(
+ phony_target_list_template.format(phony_list_content))
+ phony_build_content.extend(phony_group_list)
+
+ phony_build_file = os.path.join(config_output_dir, 'phony_targets',
+ 'BUILD.gn')
+ write_file(phony_build_file, '\n'.join(phony_build_content))
+
+
+def gen_stub_targets(parts_kits_info, platform_stubs, config_output_dir):
+ template = Template("""
+ # Auto-generated GN file, do not modify it.
+ import("//build/config/ohos/rules.gni")
+ import("//build/ohos/kits/kits_package.gni")
+ {% if combined_jar_deps %}
+ ohos_combine_jars("{{ platform }}_stub_kits_combine_java") {
+ deps = [
+ {{ combined_jar_deps }}
+ ]
+ }
+ {% endif %}
+
+ stub_jar("{{ platform }}_zframework_stub_java") {
+ deps = [
+ "//third_party/openjdk_stubs:rt_java",
+ "//build/ohos/kits/system_api:phone_systemsdk_base_java($default_toolchain)",
+ ]
+ {% if platform != "phone" %}
+ deps += [
+ "//build/ohos/kits/system_api:{{ platform }}_systemsdk_platform_java($default_toolchain)" # noqa: E501
+ ]
+ {% endif %}
+
+ {% if sources_list_files %}
+ sources_list_file = [ {{ sources_list_files }} ]
+ {% endif %}
+
+ {% if combined_jar_deps %}
+ sources_jar_deps = [":{{ platform }}_stub_kits_combine_java"]
+ {% endif %}
+ }
+ """,
+ trim_blocks=True,
+ lstrip_blocks=True)
+
+ for platform, stubs in platform_stubs.items():
+ gn_file = os.path.join(config_output_dir,
+ '{}-stub/BUILD.gn'.format(platform))
+ gni_file = os.path.join(config_output_dir,
+ '{}-stub/zframework_stub_exists.gni'.format(platform))
+ gni_contents = []
+ stub_kit_targets = []
+ dist_stub = []
+ parts = stubs.get('src')
+ for part in parts:
+ stub_kit_targets.extend(parts_kits_info.get(part))
+ if stubs.get('dist'):
+ dist_stub = stubs.get('dist')
+ if stub_kit_targets or dist_stub:
+ gni_contents.append('zframework_stub_exists = true')
+ gn_contents = template.render(
+ platform=platform,
+ combined_jar_deps=',\n'.join(stub_kit_targets),
+ sources_list_files=',\n'.join(dist_stub))
+ write_file(gn_file, gn_contents)
+ else:
+ gni_contents.append('zframework_stub_exists = false')
+
+ write_file(gni_file, '\n'.join(gni_contents))
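+
+# The generated zframework_stub_exists.gni carries a single flag,
+# e.g. (illustrative): zframework_stub_exists = true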
diff --git a/dsoftbus/build/loader/load.py b/dsoftbus/build/loader/load.py
new file mode 100755
index 0000000000000000000000000000000000000000..9c35faa8a173ac236143cef5788722dc189f73d2
--- /dev/null
+++ b/dsoftbus/build/loader/load.py
@@ -0,0 +1,452 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import sys
+import argparse
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from loader import subsystem_info # noqa: E402
+from loader import platforms_loader # noqa: E402
+from loader import generate_targets_gn # noqa: E402
+from loader import load_ohos_build # noqa: E402
+from scripts.util.file_utils import read_json_file, write_json_file, write_file # noqa: E402, E501
+
+
+def _load_component_dist(source_root_dir, target_os, target_cpu):
+ _parts_variants_info = {}
+ _dir = "component_dist/{}-{}/packages_to_install".format(
+ target_os, target_cpu)
+ _file_name = "dist_parts_info.json"
+ _dist_parts_info_file = os.path.join(source_root_dir, _dir, _file_name)
+ if not os.path.exists(_dist_parts_info_file):
+ # If the file does not exist, do nothing and return
+ return _parts_variants_info
+ _parts_info = read_json_file(_dist_parts_info_file)
+ if _parts_info is None:
+ raise Exception("read file '{}' failed.".format(_dist_parts_info_file))
+ for _part_info in _parts_info:
+ origin_part_name = _part_info.get('origin_part_name')
+ if origin_part_name in _parts_variants_info:
+ variants = _parts_variants_info.get(origin_part_name)
+ else:
+ variants = []
+ _variant_name = _part_info.get('variant_name')
+ variants.append(_variant_name)
+ _parts_variants_info[origin_part_name] = variants
+ return _parts_variants_info
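+
+# For reference, dist_parts_info.json entries are dicts shaped like
+# (illustrative): {"origin_part_name": "foo", "variant_name": "wearable"}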
+
+
+def _get_real_part_name(original_part_name, current_platform, parts_variants):
+ part_info = parts_variants.get(original_part_name)
+ if part_info is None:
+ return None, None
+ if current_platform in part_info and current_platform != 'phone':
+ real_name = '{}_{}'.format(original_part_name, current_platform)
+ else:
+ real_name = original_part_name
+ return real_name, original_part_name
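+
+# E.g. (illustrative): a part "foo" that has a "watch" variant resolves to
+# real name "foo_watch" on the watch platform and stays "foo" on phone.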
+
+
+def _get_platforms_all_parts(source_root_dir, target_os, target_cpu, all_parts,
+ build_platforms, parts_variants):
+ _dist_parts_variants = _load_component_dist(source_root_dir, target_os,
+ target_cpu)
+ target_platform_parts = {}
+ for _platform, _parts in all_parts.items():
+ if _platform not in build_platforms:
+ continue
+ part_name_info = {}
+ for part_def in _parts:
+ real_name, original_name = _get_real_part_name(
+ part_def, _platform, parts_variants)
+ if real_name is None:
+ # find this from component_dist
+ real_name, original_name = _get_real_part_name(
+ part_def, _platform, _dist_parts_variants)
+ if real_name is None:
+ continue
+ part_name_info[real_name] = original_name
+ target_platform_parts[_platform] = part_name_info
+ return target_platform_parts
+
+
+def _get_platforms_all_stubs(source_root_dir, target_os, target_cpu, all_stubs,
+ build_platforms, parts_variants):
+ _dist_parts_variants = _load_component_dist(source_root_dir, target_os,
+ target_cpu)
+ platform_stubs = {}
+ for _platform, _part_names in all_stubs.items():
+ if _platform not in build_platforms:
+ continue
+ stub_parts_from_src = []
+ stub_parts_from_dist = []
+ for part_name in _part_names:
+ real_name, original_name = _get_real_part_name(
+ part_name, _platform, parts_variants)
+ # real_name is None when part_name doesn't exist in the source
+ # tree; fall back to the prebuilt binary in component_dist.
+ if real_name is None:
+ # find this from component_dist
+ real_name, original_name = _get_real_part_name(
+ part_name, _platform, _dist_parts_variants)
+ if real_name is None:
+ continue
+ else:
+ stub_sources = os.path.join(
+ source_root_dir,
+ "component_dist/{}-{}/api_stubs/{}/stubs_sources_list.txt" # noqa: E501
+ .format(target_os, target_cpu, real_name))
+ stub_parts_from_dist.append('"{}"'.format(stub_sources))
+ else:
+ stub_parts_from_src.append(real_name)
+ platform_stubs[_platform] = {
+ "src": stub_parts_from_src,
+ "dist": stub_parts_from_dist,
+ }
+ return platform_stubs
+
+
+def _get_platforms_parts(src_parts_targets, target_platform_parts):
+ platforms_parts = {}
+ src_all_parts = src_parts_targets.keys()
+ for _platform, _all_parts in target_platform_parts.items():
+ src_parts_list = []
+ no_src_parts_list = []
+ for _part in _all_parts.keys():
+ if _part in src_all_parts:
+ src_parts_list.append(_part)
+ else:
+ no_src_parts_list.append(_part)
+ _data = {
+ 'src_parts': src_parts_list,
+ 'no_src_parts': no_src_parts_list
+ }
+ platforms_parts[_platform] = _data
+ return platforms_parts
+
+
+def _get_parts_by_platform(target_platform_parts):
+ parts_info = {}
+ if 'phone' in target_platform_parts:
+ phone_parts_list = target_platform_parts.get('phone').keys()
+ else:
+ phone_parts_list = []
+ for _platform, _parts_info in target_platform_parts.items():
+ base_parts_list = []
+ curr_parts_list = []
+ for _real_name, _original_name in _parts_info.items():
+ if _real_name in phone_parts_list:
+ base_parts_list.append(_real_name)
+ elif _original_name in phone_parts_list:
+ base_parts_list.append(_real_name)
+ else:
+ curr_parts_list.append(_real_name)
+ result_data = {
+ "base_parts_list": base_parts_list,
+ "curr_parts_list": curr_parts_list
+ }
+ parts_info[_platform] = result_data
+ return parts_info
+
+
+def _check_parts_config_info(parts_config_info):
+ if not ('parts_info' in parts_config_info
+ and 'subsystem_parts' in parts_config_info
+ and 'parts_variants' in parts_config_info
+ and 'parts_kits_info' in parts_config_info
+ and 'parts_inner_kits_info' in parts_config_info
+ and 'parts_targets' in parts_config_info):
+ raise Exception("Loading ohos.build information is incorrect.")
+
+
+def _get_required_build_parts_list(target_platform_parts):
+ parts_set = set()
+ for _parts_list in target_platform_parts.values():
+ parts_set.update(_parts_list)
+ return list(parts_set)
+
+
+def _get_required_build_targets(parts_targets, target_platform_parts):
+ required_build_targets = {}
+ _parts_list = _get_required_build_parts_list(target_platform_parts)
+ for _p_name, _info in parts_targets.items():
+ if _p_name not in _parts_list:
+ continue
+ required_build_targets[_p_name] = _info
+ return required_build_targets
+
+
+def _get_parts_src_list(required_parts_targets, parts_info):
+ parts_name_map = {}
+ for _list in parts_info.values():
+ for _info in _list:
+ parts_name_map[_info.get('part_name')] = _info.get(
+ 'origin_part_name')
+ _src_set = set()
+ for _name in required_parts_targets.keys():
+ _origin_name = parts_name_map.get(_name)
+ if _origin_name is None:
+ continue
+ _src_set.add(_origin_name)
+ return list(_src_set)
+
+
+def _check_args(args, source_root_dir):
+ print('args:', args)
+ if 'gn_root_out_dir' not in args:
+ raise Exception("args gn_root_out_dir is required.")
+ if 'platforms_config_file' not in args:
+ raise Exception("args platforms_config_file is required.")
+ if 'subsystem_config_file' not in args:
+ raise Exception("args subsystem_config_file is required.")
+ gn_root_out_dir = args.gn_root_out_dir
+ if gn_root_out_dir.startswith('/'):
+ args.gn_root_out_dir = os.path.relpath(args.gn_root_out_dir,
+ source_root_dir)
+ else:
+ _real_out_dir = os.path.realpath(gn_root_out_dir)
+ if not _real_out_dir.startswith(source_root_dir):
+ raise Exception("args gn_root_out_dir is incorrect.")
+
+
+def load(source_root_dir, args):
+ _check_args(args, source_root_dir)
+ config_output_relpath = os.path.join(args.gn_root_out_dir, 'build_configs')
+
+ # loading subsystem info, scan src dir and get subsystem ohos.build
+ _subsystem_info = subsystem_info.get_subsystem_info(
+ args.subsystem_config_file, args.example_subsystem_file,
+ source_root_dir, config_output_relpath)
+
+ target_arch = '{}_{}'.format(args.target_os, args.target_cpu)
+ # loading platforms config
+ _platforms_info = platforms_loader.get_platforms_info(
+ args.platforms_config_file, source_root_dir, args.gn_root_out_dir,
+ target_arch, config_output_relpath, args.scalable_build)
+
+ # get build platforms list
+ toolchain_to_variant_dict = _platforms_info.get('variant_toolchain_info')
+ variant_toolchains = toolchain_to_variant_dict.get('platform_toolchain')
+ _all_platforms = variant_toolchains.keys()
+
+ if args.build_platform_name == 'all':
+ build_platforms = _all_platforms
+ elif args.build_platform_name in _all_platforms:
+ build_platforms = [args.build_platform_name]
+ else:
+ raise Exception(
+ "The target_platform is incorrect, only allows [{}].".format(
+ ', '.join(_all_platforms)))
+
+ # loading ohos.build and gen part variant info
+ parts_config_info = load_ohos_build.get_parts_info(
+ source_root_dir, config_output_relpath, _subsystem_info,
+ variant_toolchains, target_arch, args.ignore_api_check,
+ args.build_xts)
+ # check parts_config_info
+ _check_parts_config_info(parts_config_info)
+ parts_variants = parts_config_info.get('parts_variants')
+ parts_targets = parts_config_info.get('parts_targets')
+ parts_info = parts_config_info.get('parts_info')
+
+ config_output_dir = os.path.join(source_root_dir, config_output_relpath)
+
+ # target_platforms_parts.json
+ target_platform_parts = _get_platforms_all_parts(source_root_dir,
+ args.target_os,
+ args.target_cpu,
+ _platforms_info.get('all_parts'), build_platforms, parts_variants)
+ target_platform_parts_file = os.path.join(config_output_dir,
+ "target_platforms_parts.json")
+ write_json_file(target_platform_parts_file,
+ target_platform_parts,
+ check_changes=True)
+
+ # {platform}_system_capabilities.json
+ # We assume that platform and device type are the same.
+ for platform in build_platforms:
+ platform_parts = target_platform_parts.get(platform)
+ platform_capabilities = []
+ for _, origin in platform_parts.items():
+ # parts_info.get() might be None if the part is a binary package
+ all_parts_variants = parts_info.get(origin)
+ if all_parts_variants is None:
+ continue
+ part = all_parts_variants[0]
+ entry = part.get('system_capabilities')
+ if entry:
+ platform_capabilities.extend(entry)
+ platform_part_json_file = os.path.join(
+ config_output_dir, "{0}_system_capabilities.json".format(platform))
+ write_json_file(platform_part_json_file,
+ sorted(platform_capabilities),
+ check_changes=True)
+
+ target_platform_stubs = _get_platforms_all_stubs(
+ source_root_dir, args.target_os, args.target_cpu,
+ _platforms_info.get('all_stubs'), build_platforms, parts_variants)
+ generate_targets_gn.gen_stub_targets(
+ parts_config_info.get('parts_kits_info'), target_platform_stubs,
+ config_output_dir)
+
+ # platforms_parts_by_src.json
+ platforms_parts_by_src = _get_platforms_parts(parts_targets,
+ target_platform_parts)
+ platforms_parts_by_src_file = os.path.join(source_root_dir,
+ config_output_relpath,
+ "platforms_parts_by_src.json")
+ write_json_file(platforms_parts_by_src_file,
+ platforms_parts_by_src,
+ check_changes=True)
+
+ required_parts_targets = _get_required_build_targets(
+ parts_targets, target_platform_parts)
+ generate_targets_gn.gen_targets_gn(required_parts_targets,
+ config_output_dir)
+ _phony_target = parts_config_info.get('phony_target')
+ required_phony_targets = _get_required_build_targets(
+ _phony_target, target_platform_parts)
+ generate_targets_gn.gen_phony_targets(required_phony_targets,
+ config_output_dir)
+
+ # required_parts_targets.json
+ build_targets_info_file = os.path.join(config_output_dir,
+ "required_parts_targets.json")
+ write_json_file(build_targets_info_file, required_parts_targets)
+ # required_parts_targets_list.json
+ build_targets_list_file = os.path.join(config_output_dir,
+ "required_parts_targets_list.json")
+ write_json_file(build_targets_list_file,
+ list(required_parts_targets.values()))
+
+ # parts src flag file
+ parts_src_flag_file = os.path.join(config_output_dir,
+ "parts_src_flag.json")
+ write_json_file(parts_src_flag_file,
+ _get_parts_src_list(required_parts_targets, parts_info),
+ check_changes=True)
+
+ # write platforms_list.gni
+ platforms_list_gni_file = os.path.join(config_output_dir,
+ "platforms_list.gni")
+ _platforms = set(build_platforms)
+ _gni_file_content = []
+ _gni_file_content.append('target_platform_list = [')
+ _gni_file_content.append(' "{}"'.format('",\n "'.join(_platforms)))
+ _gni_file_content.append(']')
+ _gni_file_content.append('kits_platform_list = [')
+ _gni_file_content.append(' "{}",'.format('",\n "'.join(_platforms)))
+ if 'phone' not in build_platforms:
+ _gni_file_content.append(' "phone"')
+ _gni_file_content.append(']')
+ write_file(platforms_list_gni_file, '\n'.join(_gni_file_content))
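+ # Illustrative result when build_platforms is ["phone"]:
+ #   target_platform_list = [ "phone" ]
+ #   kits_platform_list = [ "phone" ]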
+
+ # parts_different_info.json
+ # Generate part differences across platforms, using phone as the base.
+ parts_different_info = _get_parts_by_platform(target_platform_parts)
+ parts_different_info_file = os.path.join(config_output_dir,
+ "parts_different_info.json")
+ write_json_file(parts_different_info_file,
+ parts_different_info,
+ check_changes=True)
+ # for testfwk
+ infos_for_testfwk_file = os.path.join(config_output_dir,
+ "infos_for_testfwk.json")
+ _output_infos_for_testfwk(parts_config_info, target_platform_parts,
+ infos_for_testfwk_file)
+
+
+def _output_infos_by_platform(part_name_infos, parts_info_dict):
+ required_parts = {}
+ subsystem_infos = {}
+ for part_name, origin_part_name in part_name_infos.items():
+ part_info = parts_info_dict.get(part_name)
+ if part_info is None:
+ continue
+ if origin_part_name != part_info.get('origin_part_name'):
+ raise Exception("part configuration is incorrect.")
+ required_parts[origin_part_name] = part_info
+ _subsystem_name = part_info.get('subsystem_name')
+ if _subsystem_name in subsystem_infos:
+ p_list = subsystem_infos.get(_subsystem_name)
+ else:
+ p_list = []
+ p_list.append(origin_part_name)
+ subsystem_infos[_subsystem_name] = p_list
+ result = {}
+ result['subsystem_infos'] = subsystem_infos
+ result['part_infos'] = required_parts
+ return result
+
+
+def _output_infos_for_testfwk(parts_config_info, target_platform_parts,
+ infos_for_testfwk_file):
+ parts_info = parts_config_info.get('parts_info')
+ parts_info_dict = {}
+ for _part_name, _parts in parts_info.items():
+ for _info in _parts:
+ parts_info_dict[_info.get('part_name')] = _info
+
+ _output_infos = {}
+ for _platform, _parts in target_platform_parts.items():
+ result = _output_infos_by_platform(_parts, parts_info_dict)
+ _output_infos[_platform] = result
+
+ write_json_file(infos_for_testfwk_file, _output_infos, check_changes=True)
+
+
+def find_root_dir():
+ source_dir = os.getcwd()
+ while not os.path.exists(os.path.join(source_dir, '.gn')):
+ source_dir = os.path.dirname(source_dir)
+ if source_dir == '/':
+ raise Exception("Cannot find source root directory.")
+ return source_dir
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--platforms-config-file', required=True)
+ parser.add_argument('--subsystem-config-file', required=True)
+ parser.add_argument('--example-subsystem-file', required=False)
+ parser.add_argument('--source-root-dir', required=True)
+ parser.add_argument('--gn-root-out-dir', default='.')
+ parser.add_argument('--build-platform-name', default='phone')
+ parser.add_argument('--build-xts', dest='build_xts', action='store_true')
+ parser.set_defaults(build_xts=False)
+ parser.add_argument('--target-os', default='ohos')
+ parser.add_argument('--target-cpu', default='arm64')
+ parser.add_argument('--ignore-api-check', nargs='*', default=[])
+
+ parser.add_argument('--scalable-build', action='store_true')
+ parser.set_defaults(scalable_build=False)
+ args = parser.parse_args()
+ if args.source_root_dir:
+ dot_gn_file = os.path.join(args.source_root_dir, '.gn')
+ if not os.path.exists(dot_gn_file):
+ raise Exception("source root dir is incorrect.")
+ source_root_dir = args.source_root_dir
+ else:
+ source_root_dir = find_root_dir()
+
+ load(source_root_dir, args)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/loader/load_ohos_build.py b/dsoftbus/build/loader/load_ohos_build.py
new file mode 100755
index 0000000000000000000000000000000000000000..7419819e31887b1ab8467cc3139503d316046ce6
--- /dev/null
+++ b/dsoftbus/build/loader/load_ohos_build.py
@@ -0,0 +1,588 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import shutil
+import filecmp
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from scripts.util.file_utils import read_json_file, write_json_file, write_file # noqa: E402, E501 pylint: disable=C0413, E0611
+from scripts.util import build_utils # noqa: E402 pylint: disable=C0413
+
+import_list = """
+# import("//build/ohos.gni")
+# import("//build/ohos_var.gni")
+import("//build/ohos/ohos_part.gni")
+import("//build/ohos/ohos_kits.gni")
+import("//build/ohos/ohos_test.gni")
+"""
+
+part_template = """
+ohos_part("{}") {{
+ subsystem_name = "{}"
+ module_list = [
+ {}
+ ]
+ origin_name = "{}"
+ variant = "{}"
+}}"""
+
+inner_kits_template = """
+ohos_inner_kits("{0}_inner_kits") {{
+ sdk_libs = [
+{1}
+ ]
+ part_name = "{0}"
+ origin_name = "{2}"
+ variant = "{3}"
+}}"""
+
+system_kits_template = """
+ohos_system_kits("{0}_system_kits") {{
+ sdk_libs = [
+{1}
+ ]
+ part_name = "{0}"
+ origin_name = "{2}"
+ variant = "{3}"
+}}"""
+
+test_template = """
+ohos_part_test("{0}_test") {{
+ testonly = true
+ test_packages = [
+ {1}
+ ]
+ deps = [":{0}"]
+ part_name = "{0}"
+ subsystem_name = "{2}"
+}}"""
+
+
+def _normalize(label, path):
+ if not label.startswith('//'):
+ label = '//{}/{}'.format(path, label)
+ return label
+
+
+def read_build_file(ohos_build_file):
+ if not os.path.exists(ohos_build_file):
+ raise Exception(
+ "config file '{}' doesn't exist.".format(ohos_build_file))
+ subsystem_config = read_json_file(ohos_build_file)
+ if subsystem_config is None:
+ raise Exception("read file '{}' failed.".format(ohos_build_file))
+ return subsystem_config
+
+
+class PartObject(object):
+ """"part object info, description part variant."""
+ def __init__(self, part_name, variant_name, part_config, toolchain,
+ subsystem_name, target_arch):
+ self._origin_name = part_name
+ if variant_name != 'phone':
+ _real_name = '{}_{}'.format(part_name, variant_name)
+ else:
+ _real_name = part_name
+ self._part_name = _real_name
+ self._variant_name = variant_name
+ self._subsystem_name = subsystem_name
+ self._feature_name = None
+ self._toolchain = toolchain
+ self._inner_kits_info = {}
+ self._kits = []
+ self._target_arch = target_arch
+ self._system_capabilities = []
+ self._parsing_config(self._part_name, part_config, subsystem_name)
+
+ @classmethod
+ def _parsing_kits_lib(cls, kit_lib, is_inner_kits=False):
+ lib_config = []
+ lib_type = kit_lib.get('type')
+ if lib_type is None:
+ lib_type = 'so' if is_inner_kits else 'jar'
+ label = kit_lib.get('name')
+ if label is None:
+ raise Exception("kits lib config incorrect, required for name.")
+ lib_config.append(' type = "{}"'.format(lib_type))
+ lib_config.append(' name = "{}"'.format(label))
+ if lib_type == 'so' and 'header' in kit_lib:
+ header = kit_lib.get('header')
+ header_files = header.get('header_files')
+ lib_config.append(' header = {')
+ lib_config.append(' header_files = [')
+ for h_file in header_files:
+ lib_config.append(' "{}",'.format(h_file))
+ lib_config.append(' ]')
+ header_base = header.get('header_base')
+ lib_config.append(' header_base = "{}"'.format(header_base))
+ lib_config.append(' }')
+ if is_inner_kits is False and 'javadoc' in kit_lib:
+ javadoc_val = kit_lib.get('javadoc')
+ lib_config.append(' javadoc = {')
+ resource_dir = javadoc_val.get('resource_dir')
+ lib_config.append(
+ ' resource_dir = "{}"'.format(resource_dir))
+ lib_config.append(' }')
+ return lib_config
+
+ def _parsing_inner_kits(self, part_name, inner_kits_info, build_gn_content,
+ target_arch):
+ inner_kits_libs_gn = []
+ for inner_kits_lib in inner_kits_info:
+ inner_kits_libs_gn.append(' {')
+ inner_kits_libs_gn.extend(
+ self._parsing_kits_lib(inner_kits_lib, True))
+ inner_kits_libs_gn.append(' },')
+
+ inner_kits_libs_gn_line = '\n'.join(inner_kits_libs_gn)
+ inner_kits_def = inner_kits_template.format(part_name,
+ inner_kits_libs_gn_line,
+ self._origin_name,
+ self._variant_name)
+ build_gn_content.append(inner_kits_def)
+ # output inner kits info to resolve external deps
+ _libs_info = {}
+ for inner_kits_lib in inner_kits_info:
+ info = {'part_name': part_name}
+ label = inner_kits_lib.get('name')
+ lib_name = label.split(':')[1]
+ info['label'] = label
+ info['name'] = lib_name
+ lib_type = inner_kits_lib.get('type')
+ if lib_type is None:
+ lib_type = 'so'
+ info['type'] = lib_type
+ prebuilt = inner_kits_lib.get('prebuilt_enable')
+ if prebuilt:
+ info['prebuilt_enable'] = prebuilt
+ prebuilt_source_libs = inner_kits_lib.get('prebuilt_source')
+ prebuilt_source = prebuilt_source_libs.get(target_arch)
+ info['prebuilt_source'] = prebuilt_source
+ else:
+ info['prebuilt_enable'] = False
+ # header files
+ if lib_type == 'so':
+ header = inner_kits_lib.get('header')
+ if header is None:
+ raise Exception(
+ "header not configured, part_name = '{}'".format(
+ part_name))
+ header_base = header.get('header_base')
+ if header_base is None:
+ raise Exception(
+ "header_base not configured, part_name = '{}'".format(
+ part_name))
+ info['header_base'] = header_base
+ info['header_files'] = header.get('header_files')
+ _libs_info[lib_name] = info
+ self._inner_kits_info = _libs_info
+
+ def _parsing_system_kits(self, part_name, system_kits_info,
+ build_gn_content):
+ system_kits_libs_gn = []
+ kits = []
+ for _kits_lib in system_kits_info:
+ system_kits_libs_gn.append(' {')
+ system_kits_libs_gn.extend(self._parsing_kits_lib(
+ _kits_lib, False))
+ kits.append('"{}"'.format(_kits_lib.get('name')))
+ system_kits_libs_gn.append(' },')
+ _kits_libs_gn_line = '\n'.join(system_kits_libs_gn)
+ system_kits_def = system_kits_template.format(part_name,
+ _kits_libs_gn_line,
+ self._origin_name,
+ self._variant_name)
+ build_gn_content.append(system_kits_def)
+ self._kits = kits
+
+ def _parsing_config(self, part_name, part_config, subsystem_name):
+ self._part_target_list = []
+ build_gn_content = []
+ build_gn_content.append(import_list)
+
+ # ohos part
+ if 'module_list' not in part_config:
+ raise Exception(
+ "ohos.build incorrect, part name: '{}'".format(part_name))
+ module_list = part_config.get('module_list')
+ if len(module_list) == 0:
+ module_list_line = ''
+ else:
+ module_list_line = '"{}",'.format('",\n "'.join(module_list))
+ parts_definition = part_template.format(part_name, subsystem_name,
+ module_list_line,
+ self._origin_name,
+ self._variant_name)
+ build_gn_content.append(parts_definition)
+
+ # part inner kits
+ if part_config.get('inner_kits'):
+ self._part_target_list.append('inner_kits')
+ inner_kits_info = part_config.get('inner_kits')
+ self._parsing_inner_kits(part_name, inner_kits_info,
+ build_gn_content, self._target_arch)
+ # part system kits
+ if part_config.get('system_kits'):
+ self._part_target_list.append('system_kits')
+ system_kits_info = part_config.get('system_kits')
+ self._parsing_system_kits(part_name, system_kits_info,
+ build_gn_content)
+ # part test list
+ if part_config.get('test_list'):
+ self._part_target_list.append('test')
+ test_list = part_config.get('test_list')
+ test_list_line = '"{}",'.format('",\n "'.join(test_list))
+ test_def = test_template.format(part_name, test_list_line,
+ subsystem_name)
+ build_gn_content.append(test_def)
+ self._build_gn_content = build_gn_content
+ # feature
+ if part_config.get('feature_name'):
+ self._feature_name = part_config.get('feature_name')
+
+ # system_capabilities is a list attribute of a part in ohos.build
+ if part_config.get('system_capabilities'):
+ self._system_capabilities = part_config.get('system_capabilities')
+
+ def part_name(self):
+ """part name."""
+ return self._part_name
+
+ def part_variant(self):
+ """part variant."""
+ return self._variant_name
+
+ def toolchain(self):
+ """current part variant toolchain."""
+ return self._toolchain
+
+ def part_inner_kits(self):
+ """part inner kits."""
+ return self._inner_kits_info
+
+ def part_kits(self):
+ """part kits."""
+ return self._kits
+
+ def write_build_gn(self, config_output_dir):
+ """output build gn."""
+ part_gn_file = os.path.join(config_output_dir, self._part_name,
+ 'BUILD.gn')
+ write_file(part_gn_file, '\n'.join(self._build_gn_content))
+
+ def get_target_label(self, config_output_relpath):
+ """target label."""
+ if config_output_relpath.startswith('/'):
+ raise Exception("args config output relative path is incorrect.")
+ return "//{0}/{1}:{1}({2})".format(config_output_relpath,
+ self._part_name, self._toolchain)
+
+ def part_group_targets(self, config_output_relpath):
+ """part group target."""
+ if config_output_relpath.startswith('/'):
+ raise Exception("args config output relative path is incorrect.")
+ _labels = {}
+ _labels['part'] = self.get_target_label(config_output_relpath)
+ for group_label in self._part_target_list:
+ if group_label == 'phony':
+ _labels[group_label] = "//{0}/{1}:{1}_{2}".format(
+ config_output_relpath, self._part_name, group_label)
+ continue
+ _labels[group_label] = "//{0}/{1}:{1}_{2}({3})".format(
+ config_output_relpath, self._part_name, group_label,
+ self._toolchain)
+ return _labels
+
+ def part_info(self):
+ """part info."""
+ _info = {}
+ _info['part_name'] = self._part_name
+ _info['origin_part_name'] = self._origin_name
+ _info['toolchain_label'] = self._toolchain
+ _info['variant_name'] = self._variant_name
+ _info['subsystem_name'] = self._subsystem_name
+ _info['system_capabilities'] = self._system_capabilities
+
+ if self._feature_name:
+ _info['feature_name'] = self._feature_name
+ if self._variant_name != 'phone':
+ toolchain_name = self._toolchain.split(':')[1]
+ _build_out_dir = toolchain_name
+ else:
+ _build_out_dir = '.'
+ _info['build_out_dir'] = _build_out_dir
+ return _info
+
+
+class LoadBuildConfig(object):
+ """load build config file and parse configuration info."""
+ def __init__(self, source_root_dir, subsystem_build_info,
+ config_output_dir, variant_toolchains, subsystem_name,
+ target_arch, ignored_subsystems):
+ self._source_root_dir = source_root_dir
+ self._build_info = subsystem_build_info
+ self._config_output_relpath = config_output_dir
+ self._is_load = False
+ self._parts_variants = {}
+ self._part_list = {}
+ self._part_targets_label = {}
+ self._variant_toolchains = variant_toolchains
+ self._subsystem_name = subsystem_name
+ self._target_arch = target_arch
+ self._ignored_subsystems = ignored_subsystems
+ self._parts_info_dict = {}
+ self._phony_targets = {}
+ self._parts_path_dict = {}
+
+ def _parsing_config(self, parts_config):
+ _parts_info_dict = {}
+ _variant_phony_targets = {}
+ for part_name, value in parts_config.items():
+ if 'variants' in value:
+ variants = value.get('variants')
+ if len(variants) == 0:
+ variants = ['phone']
+ else:
+ variants = ['phone']
+ _build_target = {}
+ _targets_label = {}
+ _parts_info = []
+ for variant in variants:
+ toolchain = self._variant_toolchains.get(variant)
+ if toolchain is None:
+ continue
+ part_obj = PartObject(part_name, variant, value, toolchain,
+ self._subsystem_name, self._target_arch)
+ real_part_name = part_obj.part_name()
+ self._part_list[real_part_name] = part_obj
+
+ subsystem_config_dir = os.path.join(
+ self._config_output_relpath, self._subsystem_name)
+ part_obj.write_build_gn(
+ os.path.join(self._source_root_dir, subsystem_config_dir))
+
+ _target_label = part_obj.get_target_label(subsystem_config_dir)
+ _build_target[variant] = _target_label
+ _targets_label[real_part_name] = part_obj.part_group_targets(
+ subsystem_config_dir)
+ _parts_info.append(part_obj.part_info())
+ if variant != 'phone':
+ _variant_phony_targets[real_part_name] = _target_label
+ self._part_targets_label.update(_targets_label)
+ self._parts_variants[part_name] = _build_target
+ _parts_info_dict[part_name] = _parts_info
+ self._parts_info_dict = _parts_info_dict
+ self._phony_targets = _variant_phony_targets
+
+ def _merge_build_config(self):
+ _build_files = self._build_info.get('build_files')
+ subsystem_name = None
+ parts_info = {}
+ parts_path_dict = {}
+ for _build_file in _build_files:
+ _parts_config = read_build_file(_build_file)
+ _subsystem_name = _parts_config.get('subsystem')
+ if subsystem_name and _subsystem_name != subsystem_name:
+ raise Exception(
+ "subsystem name config incorrect in '{}'.".format(
+ _build_file))
+ subsystem_name = _subsystem_name
+ _curr_parts_info = _parts_config.get('parts')
+ for _pname in _curr_parts_info.keys():
+ parts_path_dict[_pname] = os.path.relpath(
+ os.path.dirname(_build_file), self._source_root_dir)
+ parts_info.update(_curr_parts_info)
+ subsystem_config = {}
+ subsystem_config['subsystem'] = subsystem_name
+ subsystem_config['parts'] = parts_info
+ return subsystem_config, parts_path_dict
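+
+ # (Illustrative) return shape:
+ #   ({'subsystem': 'foo', 'parts': {...}}, {'part_a': 'foo/part_a'})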
+
+ def parse(self):
+ """parse part info from build config file."""
+ if self._is_load:
+ return
+ subsystem_config, parts_path_dict = self._merge_build_config()
+ parts_config = subsystem_config.get('parts')
+ self._parsing_config(parts_config)
+ self._parts_path_dict = parts_path_dict
+ self._is_load = True
+
+ def parts_variants(self):
+ """parts varinats info."""
+ self.parse()
+ return self._parts_variants
+
+ def parts_inner_kits_info(self):
+ """parts inner kits info."""
+ self.parse()
+ _parts_inner_kits = {}
+ for part_obj in self._part_list.values():
+ _parts_inner_kits[
+ part_obj.part_name()] = part_obj.part_inner_kits()
+ return _parts_inner_kits
+
+ def parts_build_targets(self):
+ """parts build target label."""
+ self.parse()
+ return self._part_targets_label
+
+ def parts_name_list(self):
+ """parts name list."""
+ self.parse()
+ return list(self._part_list.keys())
+
+ def parts_info(self):
+ """parts info."""
+ self.parse()
+ return self._parts_info_dict
+
+ def parts_phony_target(self):
+ """parts phony target info"""
+ self.parse()
+ return self._phony_targets
+
+ def parts_kits_info(self):
+ """parts kits info."""
+ self.parse()
+ _parts_kits = {}
+ for part_obj in self._part_list.values():
+ _parts_kits[part_obj.part_name()] = part_obj.part_kits()
+ return _parts_kits
+
+ def parts_path_info(self):
+ """parts to path info."""
+ self.parse()
+ return self._parts_path_dict
+
+
+def _output_parts_info(parts_config_dict, config_output_path):
+ parts_info_output_path = os.path.join(config_output_path, "parts_info")
+ # parts_info.json
+ if 'parts_info' in parts_config_dict:
+ parts_info = parts_config_dict.get('parts_info')
+ parts_info_file = os.path.join(parts_info_output_path,
+ "parts_info.json")
+ write_json_file(parts_info_file, parts_info)
+ _part_subsystem_dict = {}
+ for key, value in parts_info.items():
+ for _info in value:
+ _sub_name = _info.get('subsystem_name')
+ _part_subsystem_dict[key] = _sub_name
+ break
+ _part_subsystem_file = os.path.join(parts_info_output_path,
+ "part_subsystem.json")
+ write_json_file(_part_subsystem_file, _part_subsystem_dict)
+
+ # subsystem_parts.json
+ if 'subsystem_parts' in parts_config_dict:
+ subsystem_parts = parts_config_dict.get('subsystem_parts')
+ subsystem_parts_file = os.path.join(parts_info_output_path,
+ "subsystem_parts.json")
+ write_json_file(subsystem_parts_file, subsystem_parts)
+
+ # parts_variants.json
+ if 'parts_variants' in parts_config_dict:
+ parts_variants = parts_config_dict.get('parts_variants')
+ parts_variants_info_file = os.path.join(parts_info_output_path,
+ "parts_variants.json")
+ write_json_file(parts_variants_info_file, parts_variants)
+
+ # inner_kits_info.json
+ if 'parts_inner_kits_info' in parts_config_dict:
+ parts_inner_kits_info = parts_config_dict.get('parts_inner_kits_info')
+ parts_inner_kits_info_file = os.path.join(parts_info_output_path,
+ "inner_kits_info.json")
+ write_json_file(parts_inner_kits_info_file, parts_inner_kits_info)
+
+ # parts_targets.json
+ if 'parts_targets' in parts_config_dict:
+ parts_targets = parts_config_dict.get('parts_targets')
+ parts_targets_info_file = os.path.join(parts_info_output_path,
+ "parts_targets.json")
+ write_json_file(parts_targets_info_file, parts_targets)
+
+ # phony_targets.json
+ if 'phony_target' in parts_config_dict:
+ phony_target = parts_config_dict.get('phony_target')
+ phony_target_info_file = os.path.join(parts_info_output_path,
+ "phony_target.json")
+ write_json_file(phony_target_info_file, phony_target)
+
+ # paths_path_info.json
+ if 'parts_path_info' in parts_config_dict:
+ parts_path_info = parts_config_dict.get('parts_path_info')
+ parts_path_info_file = os.path.join(parts_info_output_path,
+ 'parts_path_info.json')
+ write_json_file(parts_path_info_file, parts_path_info)
+ path_to_parts = {}
+ for _key, _val in parts_path_info.items():
+ path_to_parts.setdefault(_val, []).append(_key)
+ path_to_parts_file = os.path.join(parts_info_output_path,
+ 'path_to_parts.json')
+ write_json_file(path_to_parts_file, path_to_parts)
+
+
+def get_parts_info(source_root_dir,
+ config_output_relpath,
+ subsystem_info,
+ variant_toolchains,
+ target_arch,
+ ignored_subsystems,
+ build_xts=False):
+ """parts info,
+ get info from build config file.
+ """
+ parts_variants = {}
+ parts_inner_kits_info = {}
+ parts_kits_info = {}
+ parts_targets = {}
+ parts_info = {}
+ subsystem_parts = {}
+ _phony_target = {}
+ _parts_path_info = {}
+ for subsystem_name, build_config_info in subsystem_info.items():
+ if subsystem_name == 'xts' and build_xts is False:
+ continue
+ build_loader = LoadBuildConfig(source_root_dir, build_config_info,
+ config_output_relpath,
+ variant_toolchains, subsystem_name,
+ target_arch, ignored_subsystems)
+ _parts_variants = build_loader.parts_variants()
+ parts_variants.update(_parts_variants)
+ _inner_kits_info = build_loader.parts_inner_kits_info()
+ parts_inner_kits_info.update(_inner_kits_info)
+ parts_kits_info.update(build_loader.parts_kits_info())
+ _parts_targets = build_loader.parts_build_targets()
+ parts_targets.update(_parts_targets)
+ subsystem_parts[subsystem_name] = build_loader.parts_name_list()
+ parts_info.update(build_loader.parts_info())
+ _phony_target.update(build_loader.parts_phony_target())
+ _parts_path_info.update(build_loader.parts_path_info())
+ parts_config_dict = {}
+ parts_config_dict['parts_info'] = parts_info
+ parts_config_dict['subsystem_parts'] = subsystem_parts
+ parts_config_dict['parts_variants'] = parts_variants
+ parts_config_dict['parts_inner_kits_info'] = parts_inner_kits_info
+ parts_config_dict['parts_kits_info'] = parts_kits_info
+ parts_config_dict['parts_targets'] = parts_targets
+ parts_config_dict['phony_target'] = _phony_target
+ parts_config_dict['parts_path_info'] = _parts_path_info
+ _output_parts_info(parts_config_dict,
+ os.path.join(source_root_dir, config_output_relpath))
+ return parts_config_dict
diff --git a/dsoftbus/build/loader/merge_platform_build.py b/dsoftbus/build/loader/merge_platform_build.py
new file mode 100755
index 0000000000000000000000000000000000000000..7bc5b8b7123a4897380887a708acd2595551b309
--- /dev/null
+++ b/dsoftbus/build/loader/merge_platform_build.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import optparse
+import os
+import sys
+import json
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from scripts.util import build_utils # noqa: E402
+
+
+def parse_args(args):
+ parser = optparse.OptionParser()
+
+ parser.add_option(
+ '--generated-build-file',
+ help='path to generated platform build file')
+ parser.add_option(
+ '--required-build-file', help='path to required platform build file')
+ parser.add_option(
+ '--optional-build-file', help='path to optional platform build file')
+ parser.add_option(
+ '--stub-build-file', help='path to stub platform build file')
+
+ options, _ = parser.parse_args(args)
+ return options
+
+
+def do_merge(required_build_file, optional_build_file, stub_build_file,
+ generated_build_file):
+ with open(required_build_file, 'r') as required_f:
+ required = json.load(required_f)
+ with open(optional_build_file, 'r') as optional_f:
+ optional = json.load(optional_f)
+ with open(stub_build_file, 'r') as stub_f:
+ stub = json.load(stub_f)
+
+ parts = required.get('parts') + optional.get('parts')
+ stub_parts = list(set(stub.get('stub_parts')) - set(parts))
+
+ if not os.path.exists(generated_build_file):
+ build_utils.touch(generated_build_file)
+ build_utils.write_json({
+ 'parts': parts,
+ 'stub_parts': stub_parts,
+ },
+ generated_build_file,
+ only_if_changed=True)
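+
+# Illustrative merge (hypothetical data): required {"parts": ["a"]} plus
+# optional {"parts": ["b"]} with stub {"stub_parts": ["a", "c"]} yields
+# parts ["a", "b"] and stub_parts ["c"].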
+
+
+def main(args):
+ options = parse_args(args)
+ do_merge(options.required_build_file, options.optional_build_file,
+ options.stub_build_file, options.generated_build_file)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/loader/platforms_loader.py b/dsoftbus/build/loader/platforms_loader.py
new file mode 100755
index 0000000000000000000000000000000000000000..a4cf4ab90d7b15945fd52a7ea88aa2cd861a2377
--- /dev/null
+++ b/dsoftbus/build/loader/platforms_loader.py
@@ -0,0 +1,187 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import sys
+import os
+import merge_platform_build
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+
+
+class PlatformsLoader:
+ def __init__(self, platforms_config_file, source_root_dir, root_build_dir,
+ target_arch, scalable_build):
+ self._platforms_config_file = platforms_config_file
+ self._source_root_dir = source_root_dir
+ self._root_build_dir = root_build_dir
+ self._platforms_info = {}
+ self._all_parts = {}
+ self._all_stubs = {}
+ self._is_load = False
+ self._target_arch = target_arch
+ self._scalable_build = scalable_build
+
+ def _read_platforms_config(self):
+ if not os.path.exists(self._platforms_config_file):
+ raise Exception("config file '{}' doesn't exist.".format(
+ self._platforms_config_file))
+ config_info = read_json_file(self._platforms_config_file)
+ if config_info is None:
+ raise Exception("read file '{}' failed.".format(
+ self._platforms_config_file))
+ platforms_info = config_info.get('platforms')
+
+ platforms_config = {}
+ for _name, _platform_infos in platforms_info.items():
+ for _info in _platform_infos:
+ target_os = _info.get('target_os')
+ target_cpu = _info.get('target_cpu')
+ if target_os is None or target_cpu is None:
+ error_info = "platform '{}' config incorrect,"\
+ "target_os or target_cpu is None."
+ raise Exception(error_info.format(_name))
+ arch = "{}_{}".format(target_os, target_cpu)
+ if arch in platforms_config:
+ _infos = platforms_config.get(arch)
+ else:
+ _infos = {}
+ _infos[_name] = _info
+ platforms_config[arch] = _infos
+ return platforms_config
+
+ @staticmethod
+ def _load_platform_config(platform_config_file):
+ if not os.path.exists(platform_config_file):
+ raise Exception("config file '{}' doesn't exist.".format(
+ platform_config_file))
+ _platform_config = read_json_file(platform_config_file)
+ if _platform_config is None:
+ raise Exception(
+ "read file '{}' failed.".format(platform_config_file))
+ if 'parts' in _platform_config:
+ parts = _platform_config.get('parts')
+ else:
+ parts = []
+ if 'stub_parts' in _platform_config:
+ stub_parts = _platform_config.get('stub_parts')
+ else:
+ stub_parts = []
+ return parts, stub_parts
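+
+ # A platform .build file is a JSON dict of the (illustrative) form:
+ #   {"parts": ["subsys:part_a"], "stub_parts": ["subsys:stub_a"]}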
+
+ def _loading(self):
+ if self._is_load:
+ return
+ all_platforms_config = self._read_platforms_config()
+ _platforms_info = all_platforms_config.get(self._target_arch)
+ config_base_dir = os.path.dirname(self._platforms_config_file)
+ all_parts = {}
+ all_stubs = {}
+ for _platform_name, _info in _platforms_info.items():
+ if self._scalable_build is False:
+ _file_name = _info.get('parts_config')
+ if _file_name is None:
+ _file_name = os.path.join(
+ 'target_platforms', "{}.build".format(_platform_name))
+ _platform_config_file = os.path.join(config_base_dir,
+ _file_name)
+ else:
+ required_build_file = os.path.join(
+ config_base_dir, _info.get('base_parts_config'))
+ optional_build_file = os.path.join(
+ config_base_dir, _info.get('optional_parts_config'))
+ stub_build_file = os.path.join(
+ config_base_dir, _info.get('stub_parts_config'))
+ _platform_config_file = '{}.build'.format(_platform_name)
+
+ merge_platform_build.do_merge(required_build_file,
+ optional_build_file,
+ stub_build_file,
+ _platform_config_file)
+
+ parts, stubs = self._load_platform_config(_platform_config_file)
+ all_parts[_platform_name] = parts
+ all_stubs[_platform_name] = stubs
+
+ self._platforms_info = _platforms_info
+ self._all_parts = all_parts
+ self._all_stubs = all_stubs
+ self._is_load = True
+
+ def get_all_parts(self):
+ self._loading()
+ all_parts = {}
+ for _platform, _parts in self._all_parts.items():
+ part_list = []
+ for part_def in _parts:
+ _part_name = part_def.split(':')[1]
+ part_list.append(_part_name)
+ all_parts[_platform] = part_list
+ return all_parts
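+
+ # E.g. (illustrative): part_def "common:utils" yields part name "utils".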
+
+ def get_all_stubs(self):
+ self._loading()
+ all_parts = {}
+ for _platform, _parts in self._all_stubs.items():
+ part_list = []
+ for part_def in _parts:
+ _part_name = part_def.split(':')[1]
+ part_list.append(_part_name)
+ all_parts[_platform] = part_list
+ return all_parts
+
+ def platforms_toolchain(self):
+ self._loading()
+ platform_toolchain = {}
+ toolchain_platform = {}
+ for key, val in self._platforms_info.items():
+ _toolchain = val.get('toolchain')
+ platform_toolchain[key] = _toolchain
+ toolchain_platform[_toolchain] = key
+ _result = {
+ "platform_toolchain": platform_toolchain,
+ "toolchain_platform": toolchain_platform
+ }
+ return _result
+
+
+def get_platforms_info(platforms_config_file, source_root_dir, root_build_dir,
+ target_arch, config_output_relpath, scalable_build):
+ platform_loader = PlatformsLoader(platforms_config_file, source_root_dir,
+ root_build_dir, target_arch,
+ scalable_build)
+
+ platforms_info_output_dir = 'platforms_info'
+ all_parts = platform_loader.get_all_parts()
+ all_parts_file = os.path.join(source_root_dir, config_output_relpath,
+ platforms_info_output_dir, "all_parts.json")
+ write_json_file(all_parts_file, all_parts)
+
+ # variant to toolchain and toolchain to variant
+ toolchain_to_variant_dict = platform_loader.platforms_toolchain()
+ toolchain_variant_info_file = os.path.join(source_root_dir,
+ config_output_relpath,
+ platforms_info_output_dir,
+ "toolchain_to_variant.json")
+ write_json_file(toolchain_variant_info_file,
+ toolchain_to_variant_dict,
+ check_changes=True)
+
+ result = {}
+ result['all_parts'] = all_parts
+ result['all_stubs'] = platform_loader.get_all_stubs()
+ result['variant_toolchain_info'] = toolchain_to_variant_dict
+ return result
diff --git a/dsoftbus/build/loader/preloader/platforms.template b/dsoftbus/build/loader/preloader/platforms.template
new file mode 100644
index 0000000000000000000000000000000000000000..5cad64dd26b062ae0a6ddb0e9710342c7bd34462
--- /dev/null
+++ b/dsoftbus/build/loader/preloader/platforms.template
@@ -0,0 +1,19 @@
+{
+ "platforms": {
+ "phone": [
+ {
+ "target_os": "ohos",
+ "target_cpu": "arm64",
+ "toolchain": "//build/toolchain/ohos:ohos_clang_arm64",
+ "parts_config": "./parts.json"
+ },
+ {
+ "target_os": "ohos",
+ "target_cpu": "arm",
+ "toolchain": "//build/toolchain/ohos:ohos_clang_arm",
+ "parts_config": "./parts.json"
+ }
+ ]
+ }
+}
+
diff --git a/dsoftbus/build/loader/preloader/preloader.py b/dsoftbus/build/loader/preloader/preloader.py
new file mode 100755
index 0000000000000000000000000000000000000000..4e0161d2a4a473a303aaa9c216b4d84cce32437b
--- /dev/null
+++ b/dsoftbus/build/loader/preloader/preloader.py
@@ -0,0 +1,242 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import argparse
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file, write_json_file, write_file # noqa: E402, E501
+
+
+def _get_product_config(config_dir, product_name):
+ config_file = os.path.join(config_dir, '{}.json'.format(product_name))
+ if not os.path.exists(config_file):
+ raise Exception(
+ "product configuration '{}' doesn't exist.".format(config_file))
+ return config_file
+
+
+def _get_base_parts(base_config_dir, system_type):
+ system_base_config_file = os.path.join(
+ base_config_dir, '{}_system.json'.format(system_type))
+ if not os.path.exists(system_base_config_file):
+ raise Exception("product configuration '{}' doesn't exist.".format(
+ system_base_config_file))
+ data = read_json_file(system_base_config_file)
+ if data is None:
+ raise Exception(
+ "read file '{}' failed.".format(system_base_config_file))
+ return data
+
+
+def _get_inherit_parts(inherit_config, current_path):
+ _parts = {}
+ for _config in inherit_config:
+ _file = os.path.join(current_path, _config)
+ _info = read_json_file(_file)
+ if _info is None:
+ raise Exception("read file '{}' failed.".format(_file))
+ if _info.get('parts'):
+ _parts.update(_info.get('parts'))
+ return _parts
+
+
+def _get_device_info(device_name, device_config_path):
+ device_config_file = os.path.join(device_config_path,
+ '{}.json'.format(device_name))
+ device_info = read_json_file(device_config_file)
+ if device_info.get('device_name') != device_name:
+ raise Exception("device name configuration incorrect in '{}'".format(device_config_file))
+ return device_info
+
+
+def _parse_config_v2(config_info, products_config_path, base_config_dir,
+ device_config_path):
+ system_type = config_info.get("type")
+ base_parts = _get_base_parts(base_config_dir, system_type)
+ all_parts = base_parts
+
+ inherit_config = config_info.get('inherit')
+ if inherit_config:
+ inherit_parts = _get_inherit_parts(inherit_config,
+ products_config_path)
+ all_parts.update(inherit_parts)
+
+ current_product_parts = config_info.get("parts")
+ all_parts.update(current_product_parts)
+
+ product_name = config_info.get('product_name')
+ product_company = config_info.get('product_company')
+ product_device_name = config_info.get('product_device')
+ device_info = _get_device_info(product_device_name, device_config_path)
+ build_configs = {}
+ build_configs['system_type'] = system_type
+ build_configs['product_name'] = product_name
+ build_configs['product_company'] = product_company
+ build_configs['device_name'] = product_device_name
+ build_configs.update(device_info)
+ return all_parts, build_configs
+
+
+def _parse_config_v1(config_info):
+ build_configs = {"system_type": 'large'}
+ return {}, build_configs
+
+
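+# Dispatch on the optional 'version' field of the product config: version
+# "2.0" configs carry inherit/parts/device information, anything else is
+# treated as the legacy v1 format.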
+def _parse_config(product_name, products_config_path, base_parts_config_path,
+ device_config_path):
+ curr_config_file = _get_product_config(products_config_path, product_name)
+    config_info = read_json_file(curr_config_file)
+    if config_info is None:
+        raise Exception("read file '{}' failed.".format(curr_config_file))
+ config_version = config_info.get('version')
+ if not config_version:
+ config_version = "1.0"
+ if config_version == "2.0":
+        if product_name != config_info.get('product_name'):
+            raise Exception(
+                "product name configuration incorrect in '{}'".format(
+                    curr_config_file))
+ return _parse_config_v2(config_info, products_config_path,
+ base_parts_config_path, device_config_path)
+ else:
+ return _parse_config_v1(config_info)
+
+
+def _copy_platforms_config(platforms_template, parts_info_file,
+ platform_config_output_path):
+ if not os.path.exists(platforms_template):
+ raise Exception(
+ "template file '{}' doesn't exist.".format(platforms_template))
+ data = read_json_file(platforms_template)
+ if data is None:
+ raise Exception("read file '{}' failed.".format(platforms_template))
+ _parts_config_file = os.path.relpath(parts_info_file,
+ platform_config_output_path)
+ for _, _config in data.get('platforms').items():
+ for _info in _config:
+ _info['parts_config'] = _parts_config_file
+ output_file = os.path.join(platform_config_output_path, 'platforms.build')
+ write_json_file(output_file, data)
+
+
+def _get_platform_template_file(source_root_dir):
+ platforms_template = os.path.join(source_root_dir,
+ 'build/loader/preloader',
+ 'platforms.template')
+ return platforms_template
+
+
+def _get_merge_subsystem_config(product_config_path, device_config_path,
+ subsystem_config_file, output_dir,
+ product_name):
+ product_config_file = os.path.join(product_config_path,
+ '{}.json'.format(product_name))
+ output_file = os.path.join(output_dir, 'subsystem_config.json')
+ subsystem_info = read_json_file(subsystem_config_file)
+
+ product_subsystem_info = {}
+ product_info = read_json_file(product_config_file)
+ product_build_path = product_info.get('product_build_path', 'no_path')
+ if product_build_path != 'no_path' and product_build_path != '':
+ product_subsystem_info['path'] = product_build_path
+ product_subsystem_name = "product_{}".format(product_name)
+ product_subsystem_info['name'] = product_subsystem_name
+ subsystem_info[product_subsystem_name] = product_subsystem_info
+
+ product_device_name = product_info.get('product_device')
+ device_info = _get_device_info(product_device_name, device_config_path)
+
+ device_subsystem_info = {}
+ device_build_path = device_info.get('device_build_path', 'no_path')
+ if device_build_path != 'no_path' and device_build_path != '':
+ device_subsystem_info['path'] = device_build_path
+ device_subsystem_name = "device_{}".format(product_device_name)
+ device_subsystem_info['name'] = device_subsystem_name
+ subsystem_info[device_subsystem_name] = device_subsystem_info
+ write_json_file(output_file, subsystem_info)
+
+
+def _run(args):
+ products_config_path = os.path.join(args.source_root_dir,
+ args.products_config_dir)
+ product_config_root_path = os.path.dirname(products_config_path)
+ if args.base_parts_config_dir:
+ base_parts_config_path = os.path.join(args.source_root_dir,
+ args.base_parts_config_dir)
+ else:
+ base_parts_config_path = os.path.join(product_config_root_path, 'base')
+ if args.device_config_dir:
+ device_config_path = os.path.join(args.source_root_dir,
+ args.device_config_dir)
+ else:
+ device_config_path = os.path.join(product_config_root_path, 'device')
+
+ all_parts, build_configs = _parse_config(args.product_name,
+ products_config_path,
+ base_parts_config_path,
+ device_config_path)
+
+ system_type = build_configs.get('system_type')
+    if system_type not in ['standard', 'large']:
+        raise Exception(
+            "invalid system_type '{}', expected 'standard' or 'large'.".format(
+                system_type))
+
+ product_info_output_path = os.path.join(args.source_root_dir,
+ args.preloader_output_root_dir,
+ args.product_name, 'preloader')
+ platform_config_output_path = os.path.join(args.source_root_dir,
+ args.preloader_output_root_dir,
+ '{}_system'.format(system_type))
+
+ parts_info_file = os.path.join(product_info_output_path, 'parts.json')
+ parts_config_info = {"parts": list(all_parts.keys())}
+ write_json_file(parts_info_file, parts_config_info)
+
+ platforms_template_file = _get_platform_template_file(args.source_root_dir)
+ _copy_platforms_config(platforms_template_file, parts_info_file,
+ platform_config_output_path)
+
+ _build_info_list = []
+ build_info_file = os.path.join(product_info_output_path, 'build.prop')
+ for k, v in build_configs.items():
+ _build_info_list.append('{}={}'.format(k, v))
+ write_file(build_info_file, '\n'.join(_build_info_list))
+ build_info_json_file = os.path.join(product_info_output_path, 'build_config.json')
+ write_json_file(build_info_json_file, build_configs)
+
+ subsystem_config_file = os.path.join(args.source_root_dir,
+ 'build',
+ 'subsystem_config.json')
+ output_dir = os.path.join(args.source_root_dir,
+ args.preloader_output_root_dir)
+ _get_merge_subsystem_config(products_config_path, device_config_path,
+ subsystem_config_file, output_dir,
+ args.product_name)
+
+
+def main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--product-name', required=True)
+ parser.add_argument('--source-root-dir', required=True)
+ parser.add_argument('--products-config-dir', required=True)
+ parser.add_argument('--base-parts-config-dir')
+ parser.add_argument('--device-config-dir')
+ parser.add_argument('--preloader-output-root-dir', required=True)
+ args = parser.parse_args(argv)
+ _run(args)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/loader/subsystem_info.py b/dsoftbus/build/loader/subsystem_info.py
new file mode 100755
index 0000000000000000000000000000000000000000..083ff4b21004e8fe5958b57b33e6ab2c4b827800
--- /dev/null
+++ b/dsoftbus/build/loader/subsystem_info.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import sys
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from loader import subsystem_scan # noqa: E402
+from scripts.util.file_utils import write_json_file # noqa: E402
+
+
+def _output_subsystem_configs(output_dir, subsystem_configs):
+ build_config_file_name = "subsystem_build_config.json"
+ build_config_file = os.path.join(output_dir, 'subsystem_info',
+ build_config_file_name)
+ write_json_file(build_config_file, subsystem_configs)
+
+ src_output_file_name = "src_subsystem_info.json"
+ no_src_output_file_name = "no_src_subsystem_info.json"
+
+ src_subsystem = {}
+ for key, val in subsystem_configs.get('subsystem').items():
+ src_subsystem[key] = val.get('path')
+ src_output_file = os.path.join(output_dir, 'subsystem_info',
+ src_output_file_name)
+ write_json_file(src_output_file, src_subsystem)
+
+ no_src_output_file = os.path.join(output_dir, 'subsystem_info',
+ no_src_output_file_name)
+ write_json_file(no_src_output_file,
+ subsystem_configs.get('no_src_subsystem'))
+
+
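+# Scans every configured subsystem for ohos.build files and writes the
+# aggregated results under <config_output_path>/subsystem_info, returning the
+# per-subsystem build info.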
+def get_subsystem_info(subsystem_config_file, example_subsystem_file,
+ source_root_dir, config_output_path):
+ if not subsystem_config_file:
+ subsystem_config_file = 'build/subsystem_config.json'
+
+ subsystem_configs = subsystem_scan.scan(subsystem_config_file,
+ example_subsystem_file,
+ source_root_dir)
+ output_dir_realpath = os.path.join(source_root_dir, config_output_path)
+ _output_subsystem_configs(output_dir_realpath, subsystem_configs)
+ return subsystem_configs.get('subsystem')
diff --git a/dsoftbus/build/loader/subsystem_scan.py b/dsoftbus/build/loader/subsystem_scan.py
new file mode 100755
index 0000000000000000000000000000000000000000..8af78646cec061c017dc1a1890338ff21448ec5b
--- /dev/null
+++ b/dsoftbus/build/loader/subsystem_scan.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import argparse
+import glob
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+
+_default_subsystem = {"common": "build/common"}
+
+
+def _read_config(subsystem_config_file, example_subsystem_file):
+ if not os.path.exists(subsystem_config_file):
+ raise Exception(
+ "config file '{}' doesn't exist.".format(subsystem_config_file))
+ subsystem_config = read_json_file(subsystem_config_file)
+ if subsystem_config is None:
+ raise Exception("read file '{}' failed.".format(subsystem_config_file))
+
+ # example subsystem
+ if example_subsystem_file:
+ example_subsystem_config = read_json_file(example_subsystem_file)
+ if example_subsystem_config is not None:
+ subsystem_config.update(example_subsystem_config)
+
+ subsystem_info = {}
+ for key, val in subsystem_config.items():
+        if 'path' not in val:
+            raise Exception(
+                "subsystem '{}' does not configure a path.".format(key))
+ subsystem_info[key] = val.get('path')
+ return subsystem_info
+
+
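+# Recursively collect every ohos.build file under the subsystem directory;
+# recursive=True lets the '**' pattern match nested directories.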
+def _scan_build_file(subsystem_path):
+ build_config_file_name = "ohos.build"
+ search_str = "{}/**/{}".format(subsystem_path, build_config_file_name)
+ _files = glob.glob(search_str, recursive=True)
+ return _files
+
+
+def scan(subsystem_config_file, example_subsystem_file, source_root_dir):
+ subsystem_infos = _read_config(subsystem_config_file,
+ example_subsystem_file)
+ # add common subsystem info
+ subsystem_infos.update(_default_subsystem)
+
+ no_src_subsystem = {}
+ _build_configs = {}
+ for key, val in subsystem_infos.items():
+ _info = {'path': val}
+ _subsystem_path = os.path.join(source_root_dir, val)
+
+ _build_config_files = _scan_build_file(_subsystem_path)
+
+ if _build_config_files:
+ _info['build_files'] = _build_config_files
+ _build_configs[key] = _info
+ else:
+ no_src_subsystem[key] = val
+
+ scan_result = {
+ 'source_path': source_root_dir,
+ 'subsystem': _build_configs,
+ 'no_src_subsystem': no_src_subsystem
+ }
+ return scan_result
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--subsystem-config-file', required=True)
+ parser.add_argument('--example-subsystem-file', required=False)
+ parser.add_argument('--source-root-dir', required=True)
+ parser.add_argument('--output-dir', required=True)
+ args = parser.parse_args()
+
+ build_configs = scan(args.subsystem_config_file,
+ args.example_subsystem_file, args.source_root_dir)
+
+ build_configs_file = os.path.join(args.output_dir,
+ "subsystem_build_config.json")
+ write_json_file(build_configs_file, build_configs)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/misc/linux/sysroot_ld_path.sh b/dsoftbus/build/misc/linux/sysroot_ld_path.sh
new file mode 100755
index 0000000000000000000000000000000000000000..770cda661e7213f02755239dad19d52d2d8f4697
--- /dev/null
+++ b/dsoftbus/build/misc/linux/sysroot_ld_path.sh
@@ -0,0 +1,98 @@
+#!/bin/bash
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Reads etc/ld.so.conf and/or etc/ld.so.conf.d/*.conf and returns the
+# appropriate linker flags.
+#
+# sysroot_ld_path.sh /abspath/to/sysroot
+#
+
+log_error_and_exit() {
+  echo "$0:" "$@"
+ exit 1
+}
+
+process_entry() {
+ if [ -z "$1" ] || [ -z "$2" ]; then
+ log_error_and_exit "bad arguments to process_entry()"
+ fi
+ local root="$1"
+ local localpath="$2"
+
+ echo $localpath | grep -qs '^/'
+ if [ $? -ne 0 ]; then
+ log_error_and_exit $localpath does not start with /
+ fi
+ local entry="$root$localpath"
+ echo $entry
+}
+
+process_ld_so_conf() {
+ if [ -z "$1" ] || [ -z "$2" ]; then
+ log_error_and_exit "bad arguments to process_ld_so_conf()"
+ fi
+ local root="$1"
+ local ld_so_conf="$2"
+
+ # ld.so.conf may include relative include paths. pushd is a bashism.
+ local saved_pwd=$(pwd)
+ cd $(dirname "$ld_so_conf")
+
+ cat "$ld_so_conf" | \
+ while read ENTRY; do
+ echo "$ENTRY" | grep -qs ^include
+ if [ $? -eq 0 ]; then
+ local included_files=$(echo "$ENTRY" | sed 's/^include //')
+ echo "$included_files" | grep -qs ^/
+ if [ $? -eq 0 ]; then
+ if ls $root$included_files >/dev/null 2>&1 ; then
+ for inc_file in $root$included_files; do
+ process_ld_so_conf "$root" "$inc_file"
+ done
+ fi
+ else
+ if ls $(pwd)/$included_files >/dev/null 2>&1 ; then
+ for inc_file in $(pwd)/$included_files; do
+ process_ld_so_conf "$root" "$inc_file"
+ done
+ fi
+ fi
+ continue
+ fi
+
+ echo "$ENTRY" | grep -qs ^/
+ if [ $? -eq 0 ]; then
+ process_entry "$root" "$ENTRY"
+ fi
+ done
+
+ # popd is a bashism
+ cd "$saved_pwd"
+}
+
+# Main
+if [ $# -ne 1 ]; then
+ echo "Usage $0 abspath to sysroot"
+ exit 1
+fi
+
+echo $1 | grep -qs ' '
+if [ $? -eq 0 ]; then
+ log_error_and_exit $1 contains whitespace.
+fi
+
+LD_SO_CONF="$1/etc/ld.so.conf"
+LD_SO_CONF_D="$1/etc/ld.so.conf.d"
+
+if [ -e "$LD_SO_CONF" ]; then
+ process_ld_so_conf "$1" "$LD_SO_CONF" | xargs echo
+elif [ -e "$LD_SO_CONF_D" ]; then
+ find "$LD_SO_CONF_D" -maxdepth 1 -name '*.conf' -print -quit > /dev/null
+ if [ $? -eq 0 ]; then
+ for entry in $LD_SO_CONF_D/*.conf; do
+ process_ld_so_conf "$1" "$entry"
+ done | xargs echo
+ fi
+fi
diff --git a/dsoftbus/build/misc/mac/check_return_value.py b/dsoftbus/build/misc/mac/check_return_value.py
new file mode 100755
index 0000000000000000000000000000000000000000..1bbb59663ba6d17e1024df62ca794ebd3496ab65
--- /dev/null
+++ b/dsoftbus/build/misc/mac/check_return_value.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This program wraps an arbitrary command and prints "1" if the command ran
+successfully."""
+
+import os
+import subprocess
+import sys
+
+with open(os.devnull, 'wb') as devnull:
+ if not subprocess.call(sys.argv[1:], stdout=devnull, stderr=devnull):
+ print(1)
+ else:
+ print(0)
diff --git a/dsoftbus/build/misc/mac/find_sdk.py b/dsoftbus/build/misc/mac/find_sdk.py
new file mode 100755
index 0000000000000000000000000000000000000000..15b19f8d9d1b2945fc6270fdf5e39cbd077b68e4
--- /dev/null
+++ b/dsoftbus/build/misc/mac/find_sdk.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints the lowest locally available SDK version greater than or equal to a
+given minimum sdk version to standard output. If --developer_dir is passed, then
+the script will use the Xcode toolchain located at DEVELOPER_DIR.
+
+Usage:
+ python find_sdk.py [--developer_dir DEVELOPER_DIR] 10.6 # Ignores SDKs < 10.6
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+from optparse import OptionParser
+
+
+class SdkError(Exception):
+ def __init__(self, value):
+ self.value = value
+
+ def __str__(self):
+ return repr(self.value)
+
+
+def parse_version(version_str):
+    """'10.6' => [10, 6] (an iterator under Python 3; callers wrap it in list)."""
+    return map(int, re.findall(r'(\d+)', version_str))
+
+
+def main():
+ parser = OptionParser()
+ parser.add_option("--verify",
+ action="store_true", dest="verify", default=False,
+ help="return the sdk argument and warn if it doesn't exist")
+ parser.add_option("--sdk_path",
+ action="store", type="string", dest="sdk_path",
+ default="",
+ help="user-specified SDK path; bypasses verification")
+ parser.add_option("--print_sdk_path",
+ action="store_true", dest="print_sdk_path", default=False,
+ help="Additionally print the path the SDK (appears first).")
+ parser.add_option("--developer_dir", help='Path to Xcode.')
+ options, args = parser.parse_args()
+ if len(args) != 1:
+ parser.error('Please specify a minimum SDK version')
+ min_sdk_version = args[0]
+
+ if options.developer_dir:
+ os.environ['DEVELOPER_DIR'] = options.developer_dir
+
+ job = subprocess.Popen(['xcode-select', '-print-path'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+    # stderr is redirected to stdout above, so only 'out' carries output.
+    out, _ = job.communicate()
+    if job.returncode != 0:
+        print(out, file=sys.stderr)
+ raise Exception('Error %d running xcode-select' % job.returncode)
+ sdk_dir = os.path.join(
+ str(out.rstrip(), encoding="utf-8"),
+ 'Platforms/MacOSX.platform/Developer/SDKs')
+ # Xcode must be installed, its license agreement must be accepted, and its
+ # command-line tools must be installed. Stand-alone installations (in
+ # /Library/Developer/CommandLineTools) are not supported.
+ # https://bugs.chromium.org/p/chromium/issues/detail?id=729990#c1
+ file_path = os.path.relpath("/path/to/Xcode.app")
+    if not os.path.isdir(sdk_dir) or '.app/Contents/Developer' not in sdk_dir:
+ raise SdkError('Install Xcode, launch it, accept the license ' +
+ 'agreement, and run `sudo xcode-select -s %s` ' % file_path +
+ 'to continue.')
+    sdks = [re.findall(r'^MacOSX(1[01]\.\d+)\.sdk$', s) for s in
+ os.listdir(sdk_dir)]
+ sdks = [s[0] for s in sdks if s] # [['10.5'], ['10.6']] => ['10.5', '10.6']
+ sdks = [s for s in sdks # ['10.5', '10.6'] => ['10.6']
+ if list(parse_version(s)) >= list(parse_version(min_sdk_version))]
+
+ if not sdks:
+ raise Exception('No %s+ SDK found' % min_sdk_version)
+ best_sdk = sorted(sdks)[0]
+
+ if options.verify and best_sdk != min_sdk_version and not options.sdk_path:
+ print('', file=sys.stderr)
+ print(' vvvvvvv',
+ file=sys.stderr)
+ print('', file=sys.stderr)
+ print(
+ 'This build requires the %s SDK, but it was not found on your system.' \
+ % min_sdk_version, file=sys.stderr)
+ print(
+ 'Either install it, or explicitly set mac_sdk in your GYP_DEFINES.',
+ file=sys.stderr)
+ print('', file=sys.stderr)
+ print(' ^^^^^^^',
+ file=sys.stderr)
+ print('', file=sys.stderr)
+ sys.exit(1)
+
+ if options.print_sdk_path:
+ _sdk_path = subprocess.check_output(
+ ['xcrun', '-sdk', 'macosx' + best_sdk, '--show-sdk-path']).strip()
+ if isinstance(_sdk_path, bytes):
+ _sdk_path = _sdk_path.decode()
+ print(_sdk_path)
+ return best_sdk
+
+
+if __name__ == '__main__':
+ if sys.platform != 'darwin':
+ raise Exception("This script only runs on Mac")
+ print(main())
+ sys.exit(0)
diff --git a/dsoftbus/build/misc/overrides/build.gni b/dsoftbus/build/misc/overrides/build.gni
new file mode 100755
index 0000000000000000000000000000000000000000..f7162e9f7dce6467abed27197320c0283ae4f341
--- /dev/null
+++ b/dsoftbus/build/misc/overrides/build.gni
@@ -0,0 +1,19 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Variable that can be used to support multiple build scenarios, like having
+# Chromium specific targets in a client project's GN file etc.
+build_with_chromium = false
+
+# Some non-Chromium builds don't support building java targets.
+enable_java_templates = true
+
+# Some non-Chromium builds don't use Chromium's third_party/binutils.
+linux_use_bundled_binutils_override = true
+
+# Skip assertions about 4GiB file size limit.
+ignore_elf32_limitations = true
+
+# Use the system install of Xcode for tools like ibtool, libtool, etc.
+use_system_xcode = true
diff --git a/dsoftbus/build/misc/overrides/gtest.gni b/dsoftbus/build/misc/overrides/gtest.gni
new file mode 100644
index 0000000000000000000000000000000000000000..bf30fb1d7229f4a299aa0e812d337ce29cd65043
--- /dev/null
+++ b/dsoftbus/build/misc/overrides/gtest.gni
@@ -0,0 +1,15 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Exclude support for registering main function in multi-process tests.
+gtest_include_multiprocess = false
+
+# Exclude support for platform-specific operations across unit tests.
+gtest_include_platform_test = false
+
+# Exclude support for testing Objective C code on OS X and iOS.
+gtest_include_objc_support = false
+
+# Exclude support for flushing coverage files on iOS.
+gtest_include_ios_coverage = false
diff --git a/dsoftbus/build/misc/sanitizers/asan_suppressions.cc b/dsoftbus/build/misc/sanitizers/asan_suppressions.cc
new file mode 100755
index 0000000000000000000000000000000000000000..df94bc895032db40ec0518600969babdb129f157
--- /dev/null
+++ b/dsoftbus/build/misc/sanitizers/asan_suppressions.cc
@@ -0,0 +1,23 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for AddressSanitizer.
+// It should only be used under very limited circumstances such as suppressing
+// a report caused by an interceptor call in a system-installed library.
+
+#if defined(ADDRESS_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kASanDefaultSuppressions which contains ASan suppressions delimited by
+// newlines.
+char kASanDefaultSuppressions[] =
+// http://crbug.com/178677
+"interceptor_via_lib:libsqlite3.so\n"
+
+// PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+// End of suppressions.
+; // Please keep this semicolon.
+
+#endif // ADDRESS_SANITIZER
diff --git a/dsoftbus/build/misc/sanitizers/lsan_suppressions.cc b/dsoftbus/build/misc/sanitizers/lsan_suppressions.cc
new file mode 100755
index 0000000000000000000000000000000000000000..464c9e296dc3b1f847389246157c731242076f9c
--- /dev/null
+++ b/dsoftbus/build/misc/sanitizers/lsan_suppressions.cc
@@ -0,0 +1,103 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for LeakSanitizer.
+// You can also pass additional suppressions via LSAN_OPTIONS:
+// LSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/leaksanitizer for more info.
+
+#if defined(LEAK_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kLSanDefaultSuppressions which contains LSan suppressions delimited by
+// newlines. See http://dev.chromium.org/developers/testing/leaksanitizer
+// for the instructions on writing suppressions.
+char kLSanDefaultSuppressions[] =
+ // Intentional leak used as sanity test for Valgrind/memcheck.
+ "leak:base::ToolsSanityTest_MemoryLeak_Test::TestBody\n"
+
+ // ================ Leaks in third-party code ================
+
+ // False positives in libfontconfig. http://crbug.com/39050
+ "leak:libfontconfig\n"
+    // eglibc-2.19/string/strdup.c creates false positive leak errors for the
+    // same reason as crbug.com/39050. The leak error stack trace, when
+    // unwound on malloc, includes a call to libfontconfig. But the default
+    // stack trace is too short on the leak sanitizer bot for the
+    // libfontconfig suppression to work. http://crbug.com/605286
+ "leak:__strdup\n"
+
+ // Leaks in Nvidia's libGL.
+ "leak:libGL.so\n"
+
+ "leak:net::NSSCertDatabase::ImportFromPKCS12\n"
+ "leak:net::NSSCertDatabase::ListCerts\n"
+ "leak:net::NSSCertDatabase::DeleteCertAndKey\n"
+ "leak:crypto::ScopedTestNSSDB::ScopedTestNSSDB\n"
+ // Another leak due to not shutting down NSS properly.
+ // http://crbug.com/124445
+ "leak:error_get_my_stack\n"
+ // The NSS suppressions above will not fire when the fast stack unwinder is
+ // used, because it can't unwind through NSS libraries. Apply blanket
+ // suppressions for now.
+ "leak:libnssutil3\n"
+ "leak:libnspr4\n"
+ "leak:libnss3\n"
+ "leak:libplds4\n"
+ "leak:libnssckbi\n"
+
+ // XRandR has several one time leaks.
+ "leak:libxrandr\n"
+
+ // xrandr leak. http://crbug.com/119677
+ "leak:XRRFindDisplay\n"
+
+ // http://crbug.com/431213, http://crbug.com/416665
+ "leak:gin/object_template_builder.h\n"
+
+ // Leaks in swrast_dri.so. http://crbug.com/540042
+ "leak:swrast_dri.so\n"
+
+ // Leak in glibc's gconv caused by fopen(..., "r,ccs=UNICODE")
+ "leak:__gconv_lookup_cache\n"
+
+ // ================ Leaks in Chromium code ================
+ // PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS.
+ // Instead, commits that introduce memory leaks should be reverted.
+ // Suppressing the leak is acceptable in some cases when reverting is
+ // impossible, i.e. when enabling leak detection for the first time for a
+ // test target with pre-existing leaks.
+
+ // Small test-only leak in ppapi_unittests. http://crbug.com/258113
+ "leak:ppapi::proxy::PPP_Instance_Private_ProxyTest_PPPInstancePrivate_"
+ "Test\n"
+
+ // http://crbug.com/322671
+ "leak:content::SpeechRecognitionBrowserTest::SetUpOnMainThread\n"
+
+ // http://crbug.com/355641
+ "leak:TrayAccessibilityTest\n"
+
+ // http://crbug.com/354644
+ "leak:CertificateViewerUITest::ShowModalCertificateViewer\n"
+
+ // http://crbug.com/356306
+ "leak:service_manager::SetProcessTitleFromCommandLine\n"
+
+ // https://crbug.com/755670
+ "leak:third_party/yasm/\n"
+
+    // v8 leaks caused by weak ref callbacks not being called
+ "leak:blink::DOMWrapperWorld::Create\n"
+ "leak:blink::ScriptState::Create\n"
+
+ // https://crbug.com/795148
+ "leak:third_party/fontconfig/\n"
+
+ // PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+ // End of suppressions.
+ ; // Please keep this semicolon.
+
+#endif // LEAK_SANITIZER
diff --git a/dsoftbus/build/misc/sanitizers/sanitizer_options.cc b/dsoftbus/build/misc/sanitizers/sanitizer_options.cc
new file mode 100755
index 0000000000000000000000000000000000000000..02ff91364adc0aa1f9cd0e13ab9dc85d128b5a07
--- /dev/null
+++ b/dsoftbus/build/misc/sanitizers/sanitizer_options.cc
@@ -0,0 +1,175 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file contains the default options for various compiler-based dynamic
+// tools.
+
+
+#if defined(ADDRESS_SANITIZER) || defined(LEAK_SANITIZER) || \
+ defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER) || \
+ defined(UNDEFINED_SANITIZER)
+// Functions returning default options are declared weak in the tools' runtime
+// libraries. To make the linker pick the strong replacements for those
+// functions from this module, we explicitly force its inclusion by passing
+// -Wl,-u_sanitizer_options_link_helper
+extern "C"
+void _sanitizer_options_link_helper() { }
+
+// The callbacks we define here will be called from the sanitizer runtime, but
+// aren't referenced from the Chrome executable. We must ensure that those
+// callbacks are not sanitizer-instrumented, and that they aren't stripped by
+// the linker.
+#define SANITIZER_HOOK_ATTRIBUTE \
+ extern "C" \
+ __attribute__((no_sanitize("address", "memory", "thread", "undefined"))) \
+ __attribute__((visibility("default"))) \
+ __attribute__((used))
+#endif
+
+#if defined(ADDRESS_SANITIZER)
+// Default options for AddressSanitizer in various configurations:
+//   malloc_context_size=5 - limit the size of stack traces collected by ASan
+//     for each malloc/free to 5 frames. These stack traces tend to accumulate
+// very fast in applications using JIT (v8 in Chrome's case), see
+// https://code.google.com/p/address-sanitizer/issues/detail?id=177
+// symbolize=1 - enable in-process symbolization.
+// legacy_pthread_cond=1 - run in the libpthread 2.2.5 compatibility mode to
+// work around libGL.so using the obsolete API, see
+// http://crbug.com/341805. This may break if pthread_cond_t objects are
+// accessed by both instrumented and non-instrumented binaries (e.g. if
+// they reside in shared memory). This option is going to be deprecated in
+// upstream AddressSanitizer and must not be used anywhere except the
+// official builds.
+// check_printf=1 - check the memory accesses to printf (and other formatted
+// output routines) arguments.
+// use_sigaltstack=1 - handle signals on an alternate signal stack. Useful
+// for stack overflow detection.
+// strip_path_prefix=/../../ - prefixes up to and including this
+// substring will be stripped from source file paths in symbolized reports
+// fast_unwind_on_fatal=1 - use the fast (frame-pointer-based) stack unwinder
+// to print error reports. V8 doesn't generate debug info for the JIT code,
+// so the slow unwinder may not work properly.
+// detect_stack_use_after_return=1 - use fake stack to delay the reuse of
+// stack allocations and detect stack-use-after-return errors.
+#if defined(OS_LINUX)
+#if defined(GOOGLE_CHROME_BUILD)
+// Default AddressSanitizer options for the official build. These do not affect
+// tests on buildbots (which don't set GOOGLE_CHROME_BUILD) or non-official
+// Chromium builds.
+const char kAsanDefaultOptions[] =
+ "legacy_pthread_cond=1 malloc_context_size=5 "
+ "symbolize=1 check_printf=1 use_sigaltstack=1 detect_leaks=0 "
+ "strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
+ "allow_user_segv_handler=1 ";
+#else
+// Default AddressSanitizer options for buildbots and non-official builds.
+const char* kAsanDefaultOptions =
+ "symbolize=1 check_printf=1 use_sigaltstack=1 "
+ "detect_leaks=0 strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
+ "detect_stack_use_after_return=1 "
+ "allow_user_segv_handler=1 ";
+#endif // GOOGLE_CHROME_BUILD
+
+#elif defined(OS_MACOSX)
+const char *kAsanDefaultOptions =
+ "check_printf=1 use_sigaltstack=1 "
+ "strip_path_prefix=/../../ fast_unwind_on_fatal=1 "
+ "detect_stack_use_after_return=1 detect_odr_violation=0 ";
+#endif // OS_LINUX
+
+#if defined(OS_LINUX) || defined(OS_MACOSX)
+// Allow NaCl to override the default asan options.
+extern const char* kAsanDefaultOptionsNaCl;
+__attribute__((weak)) const char* kAsanDefaultOptionsNaCl = nullptr;
+
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_options() {
+ if (kAsanDefaultOptionsNaCl)
+ return kAsanDefaultOptionsNaCl;
+ return kAsanDefaultOptions;
+}
+
+extern char kASanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_suppressions() {
+ return kASanDefaultSuppressions;
+}
+#endif // OS_LINUX || OS_MACOSX
+#endif // ADDRESS_SANITIZER
+
+#if defined(THREAD_SANITIZER) && defined(OS_LINUX)
+// Default options for ThreadSanitizer in various configurations:
+// detect_deadlocks=1 - enable deadlock (lock inversion) detection.
+// second_deadlock_stack=1 - more verbose deadlock reports.
+// report_signal_unsafe=0 - do not report async-signal-unsafe functions
+// called from signal handlers.
+// report_thread_leaks=0 - do not report unjoined threads at the end of
+// the program execution.
+// print_suppressions=1 - print the list of matched suppressions.
+// history_size=7 - make the history buffer proportional to 2^7 (the maximum
+// value) to keep more stack traces.
+// strip_path_prefix=/../../ - prefixes up to and including this
+// substring will be stripped from source file paths in symbolized reports.
+const char kTsanDefaultOptions[] =
+ "detect_deadlocks=1 second_deadlock_stack=1 report_signal_unsafe=0 "
+ "report_thread_leaks=0 print_suppressions=1 history_size=7 "
+ "strict_memcmp=0 strip_path_prefix=/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_options() {
+ return kTsanDefaultOptions;
+}
+
+extern char kTSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_suppressions() {
+ return kTSanDefaultSuppressions;
+}
+
+#endif // THREAD_SANITIZER && OS_LINUX
+
+#if defined(MEMORY_SANITIZER)
+// Default options for MemorySanitizer:
+// intercept_memcmp=0 - do not detect uninitialized memory in memcmp() calls.
+// Pending cleanup, see http://crbug.com/523428
+// strip_path_prefix=/../../ - prefixes up to and including this
+// substring will be stripped from source file paths in symbolized reports.
+const char kMsanDefaultOptions[] =
+ "intercept_memcmp=0 strip_path_prefix=/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__msan_default_options() {
+ return kMsanDefaultOptions;
+}
+
+#endif // MEMORY_SANITIZER
+
+#if defined(LEAK_SANITIZER)
+// Default options for LeakSanitizer:
+// print_suppressions=1 - print the list of matched suppressions.
+// strip_path_prefix=/../../ - prefixes up to and including this
+// substring will be stripped from source file paths in symbolized reports.
+const char kLsanDefaultOptions[] =
+ "print_suppressions=1 strip_path_prefix=/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_options() {
+ return kLsanDefaultOptions;
+}
+
+extern char kLSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_suppressions() {
+ return kLSanDefaultSuppressions;
+}
+
+#endif // LEAK_SANITIZER
+
+#if defined(UNDEFINED_SANITIZER)
+// Default options for UndefinedBehaviorSanitizer:
+// print_stacktrace=1 - print the stacktrace when UBSan reports an error.
+const char kUbsanDefaultOptions[] =
+ "print_stacktrace=1 strip_path_prefix=/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char* __ubsan_default_options() {
+ return kUbsanDefaultOptions;
+}
+
+#endif // UNDEFINED_SANITIZER
diff --git a/dsoftbus/build/misc/sanitizers/tsan_suppressions.cc b/dsoftbus/build/misc/sanitizers/tsan_suppressions.cc
new file mode 100755
index 0000000000000000000000000000000000000000..4b2fb0907e152aa4d7fd75d5dd1086d1d4f35164
--- /dev/null
+++ b/dsoftbus/build/misc/sanitizers/tsan_suppressions.cc
@@ -0,0 +1,267 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for ThreadSanitizer.
+// You can also pass additional suppressions via TSAN_OPTIONS:
+// TSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for more info.
+
+#if defined(THREAD_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kTSanDefaultSuppressions which contains TSan suppressions delimited by
+// newlines.
+// See http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for the instructions on writing suppressions.
+char kTSanDefaultSuppressions[] =
+ // False positives in libflashplayer.so, libgio.so and libglib.so.
+ // Since we don't instrument them, we cannot reason about the
+ // synchronization in them.
+ "race:libflashplayer.so\n"
+ "race:libgio*.so\n"
+ "race:libglib*.so\n"
+
+ // Intentional race in ToolsSanityTest.DataRace in base_unittests.
+ "race:base/tools_sanity_unittest.cc\n"
+
+ // Data race on WatchdogCounter [test-only].
+ "race:base/threading/watchdog_unittest.cc\n"
+
+ // Races in libevent, http://crbug.com/23244.
+ "race:libevent/event.c\n"
+
+ // http://crbug.com/84094.
+ "race:sqlite3StatusSet\n"
+ "race:pcache1EnforceMaxPage\n"
+ "race:pcache1AllocPage\n"
+
+ // http://crbug.com/120808
+ "race:base/threading/watchdog.cc\n"
+
+ // http://crbug.com/157586
+ "race:third_party/libvpx/source/libvpx/vp8/decoder/threading.c\n"
+
+ // http://crbug.com/158718
+ "race:third_party/ffmpeg/libavcodec/pthread.c\n"
+ "race:third_party/ffmpeg/libavcodec/pthread_frame.c\n"
+ "race:third_party/ffmpeg/libavcodec/vp8.c\n"
+ "race:third_party/ffmpeg/libavutil/mem.c\n"
+ "race:*HashFrameForTesting\n"
+ "race:third_party/ffmpeg/libavcodec/h264pred.c\n"
+ "race:media::ReleaseData\n"
+
+ // http://crbug.com/158922
+ "race:third_party/libvpx/source/libvpx/vp8/encoder/*\n"
+ "race:third_party/libvpx/source/libvpx/vp9/encoder/*\n"
+
+ // http://crbug.com/239359
+ "race:media::TestInputCallback::OnData\n"
+
+ // http://crbug.com/244368
+ "race:skia::BeginPlatformPaint\n"
+
+ // http://crbug.com/244385
+ "race:unixTempFileDir\n"
+
+ // http://crbug.com/244755
+ "race:v8::internal::Zone::NewExpand\n"
+ "race:TooLateToEnableNow\n"
+ "race:adjust_segment_bytes_allocated\n"
+
+ // http://crbug.com/244774
+ "race:webrtc::RTPReceiver::ProcessBitrate\n"
+ "race:webrtc::RTPSender::ProcessBitrate\n"
+ "race:webrtc::VideoCodingModuleImpl::Decode\n"
+ "race:webrtc::RTPSender::SendOutgoingData\n"
+ "race:webrtc::LibvpxVp8Encoder::GetEncodedPartitions\n"
+ "race:webrtc::LibvpxVp8Encoder::Encode\n"
+ "race:webrtc::ViEEncoder::DeliverFrame\n"
+ "race:webrtc::vcm::VideoReceiver::Decode\n"
+ "race:webrtc::VCMReceiver::FrameForDecoding\n"
+ "race:*trace_event_unique_catstatic*\n"
+
+ // http://crbug.com/244856
+ "race:libpulsecommon*.so\n"
+
+ // http://crbug.com/246968
+ "race:webrtc::VideoCodingModuleImpl::RegisterPacketRequestCallback\n"
+
+ // http://crbug.com/257396
+ "race:base::trace_event::"
+ "TraceEventTestFixture_TraceSamplingScope_Test::TestBody\n"
+
+ // http://crbug.com/258479
+ "race:SamplingStateScope\n"
+ "race:g_trace_state\n"
+
+ // http://crbug.com/258499
+ "race:third_party/skia/include/core/SkRefCnt.h\n"
+
+ // http://crbug.com/268924
+ "race:base::g_power_monitor\n"
+ "race:base::PowerMonitor::PowerMonitor\n"
+ "race:base::PowerMonitor::AddObserver\n"
+ "race:base::PowerMonitor::RemoveObserver\n"
+ "race:base::PowerMonitor::IsOnBatteryPower\n"
+
+ // http://crbug.com/258935
+ "race:base::Thread::StopSoon\n"
+
+ // http://crbug.com/272095
+ "race:base::g_top_manager\n"
+
+ // http://crbug.com/308590
+ "race:CustomThreadWatcher::~CustomThreadWatcher\n"
+
+ // http://crbug.com/310851
+ "race:net::ProxyResolverV8Tracing::Job::~Job\n"
+
+ // http://crbug.com/476529
+ "deadlock:cc::VideoLayerImpl::WillDraw\n"
+
+ // http://crbug.com/328826
+ "race:gLCDOrder\n"
+ "race:gLCDOrientation\n"
+
+ // http://crbug.com/328868
+ "race:PR_Lock\n"
+
+ // http://crbug.com/333244
+ "race:content::"
+ "VideoCaptureImplTest::MockVideoCaptureImpl::~MockVideoCaptureImpl\n"
+
+ // http://crbug.com/333871
+ "race:v8::internal::Interface::NewValue()::value_interface\n"
+ "race:v8::internal::IsMinusZero(double)::minus_zero\n"
+ "race:v8::internal::FastCloneShallowObjectStub::"
+ "InitializeInterfaceDescriptor\n"
+ "race:v8::internal::KeyedLoadStubCompiler::registers\n"
+ "race:v8::internal::KeyedStoreStubCompiler::registers()::registers\n"
+ "race:v8::internal::KeyedLoadFastElementStub::"
+ "InitializeInterfaceDescriptor\n"
+ "race:v8::internal::KeyedStoreFastElementStub::"
+ "InitializeInterfaceDescriptor\n"
+ "race:v8::internal::LoadStubCompiler::registers\n"
+ "race:v8::internal::StoreStubCompiler::registers\n"
+ "race:v8::internal::HValue::LoopWeight\n"
+
+ // http://crbug.com/334140
+ "race:CommandLine::HasSwitch\n"
+ "race:CommandLine::current_process_commandline_\n"
+ "race:CommandLine::GetSwitchValueASCII\n"
+
+ // http://crbug.com/338675
+ "race:blink::s_platform\n"
+ "race:content::"
+ "RendererWebKitPlatformSupportImpl::~RendererWebKitPlatformSupportImpl\n"
+
+ // http://crbug.com/347534
+ "race:v8::internal::V8::TearDown\n"
+
+ // http://crbug.com/347538
+ "race:sctp_timer_start\n"
+
+ // http://crbug.com/347553
+ "race:blink::WebString::reset\n"
+
+ // http://crbug.com/348511
+ "race:webrtc::acm1::AudioCodingModuleImpl::PlayoutData10Ms\n"
+
+ // http://crbug.com/348982
+ "race:cricket::P2PTransportChannel::OnConnectionDestroyed\n"
+ "race:cricket::P2PTransportChannel::AddConnection\n"
+
+ // http://crbug.com/348984
+ "race:sctp_express_handle_sack\n"
+ "race:system_base_info\n"
+
+ // https://code.google.com/p/v8/issues/detail?id=3143
+ "race:v8::internal::FLAG_track_double_fields\n"
+
+ // http://crbug.com/374135
+ "race:media::AlsaWrapper::PcmWritei\n"
+
+ // False positive in libc's tzset_internal, http://crbug.com/379738.
+ "race:tzset_internal\n"
+
+ // http://crbug.com/380554
+ "deadlock:g_type_add_interface_static\n"
+
+  // http://crbug.com/386385
+ "race:content::AppCacheStorageImpl::DatabaseTask::CallRunCompleted\n"
+
+ // http://crbug.com/388730
+ "race:g_next_user_script_id\n"
+
+ // http://crbug.com/397022
+ "deadlock:"
+ "base::trace_event::TraceEventTestFixture_ThreadOnceBlocking_Test::"
+ "TestBody\n"
+
+ // http://crbug.com/415472
+ "deadlock:base::trace_event::TraceLog::GetCategoryGroupEnabled\n"
+
+ // http://crbug.com/490856
+ "deadlock:content::TracingControllerImpl::SetEnabledOnFileThread\n"
+
+ // https://code.google.com/p/skia/issues/detail?id=3294
+ "race:SkBaseMutex::acquire\n"
+
+ // https://crbug.com/430533
+ "race:TileTaskGraphRunner::Run\n"
+
+ // Lock inversion in third party code, won't fix.
+ // https://crbug.com/455638
+ "deadlock:dbus::Bus::ShutdownAndBlock\n"
+
+ // https://crbug.com/459429
+ "race:randomnessPid\n"
+
+ // https://crbug.com/454655
+ "race:content::BrowserTestBase::PostTaskToInProcessRendererAndWait\n"
+
+ // https://crbug.com/569682
+ "race:blink::ThreadState::visitStackRoots\n"
+
+ // http://crbug.com/582274
+ "race:usrsctp_close\n"
+
+ // http://crbug.com/633145
+ "race:third_party/libjpeg_turbo/simd/jsimd_x86_64.c\n"
+
+ // http://crbug.com/587199
+ "race:base::TimerTest_OneShotTimer_CustomTaskRunner_Test::TestBody\n"
+ "race:base::TimerSequenceTest_OneShotTimerTaskOnPoolSequence_Test::"
+ "TestBody\n"
+ "race:base::TimerSequenceTest_"
+ "OneShotTimerUsedAndTaskedOnDifferentSequences\n"
+
+ // http://crbug.com/v8/6065
+ "race:net::(anonymous namespace)::ProxyResolverV8TracingImpl::RequestImpl"
+ "::~RequestImpl()\n"
+
+ // http://crbug.com/691029
+ "deadlock:libGLX.so*\n"
+
+ // http://crbug.com/719633
+ "race:crypto::EnsureNSSInit()\n"
+
+ // http://crbug.com/695929
+ "race:base::i18n::IsRTL\n"
+ "race:base::i18n::SetICUDefaultLocale\n"
+
+ // https://crbug.com/794920
+ "race:base::debug::SetCrashKeyString\n"
+ "race:crash_reporter::internal::CrashKeyStringImpl::Set\n"
+
+ // http://crbug.com/795110
+ "race:third_party/fontconfig/*\n"
+
+ // http://crbug.com/797998
+ "race:content::SandboxIPCHandler::HandleLocaltime\n"
+
+ // End of suppressions.
+ ; // Please keep this semicolon.
+
+#endif // THREAD_SANITIZER
diff --git a/dsoftbus/build/ohos.gni b/dsoftbus/build/ohos.gni
new file mode 100755
index 0000000000000000000000000000000000000000..77be836c12dbe6b295f79564fd820f4b9cf052ec
--- /dev/null
+++ b/dsoftbus/build/ohos.gni
@@ -0,0 +1,30 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/ohos/ndk/ndk.gni")
+import("//build/ohos/notice/notice.gni")
+import("//build/ohos/sa_profile/sa_profile.gni")
+import("//build/ohos_var.gni")
+import("//build/toolchain/toolchain.gni")
+
+# import cxx base templates
+import("//build/templates/cxx/cxx.gni")
+
+import("//build/ohos/ace/ace.gni")
+import("//build/ohos/app/app.gni")
+
+import("//build/templates/common/ohos_templates.gni")
+
+# import prebuilt templates
+import("//build/templates/cxx/prebuilt.gni")
diff --git a/dsoftbus/build/ohos/ace/ace.gni b/dsoftbus/build/ohos/ace/ace.gni
new file mode 100644
index 0000000000000000000000000000000000000000..60b1082bac9edd74dfdae0453d7bc8175dfb7c0c
--- /dev/null
+++ b/dsoftbus/build/ohos/ace/ace.gni
@@ -0,0 +1,67 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/python.gni")
+import("//build/templates/common/copy.gni")
+
+template("js_declaration") {
+ group(target_name) {
+ not_needed(invoker, "*")
+ }
+}
+
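+# Embeds a JS file into a linkable object file with llvm-objcopy and wraps it
+# in a source_set. Example usage (target and file names are illustrative):
+#   gen_js_obj("app_js") {
+#     input = "app.js"
+#     output = "$target_gen_dir/app_js.o"
+#   }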
+template("gen_js_obj") {
+ name = target_name
+ action("gen_js_obj_" + name) {
+ visibility = [ ":*" ]
+
+ objcopy_tool = "${clang_base_path}/bin/llvm-objcopy"
+ platform = "${current_os}_${current_cpu}"
+ if (platform == "mingw_x86_64") {
+ script =
+ "//third_party/flutter/engine/flutter/sky/tools/objcopy_pc_mac.py"
+ } else if (platform == "mac_x64") {
+ script =
+ "//third_party/flutter/engine/flutter/sky/tools/objcopy_pc_mac.py"
+ } else if (target_cpu == "x86_64") {
+ script = "//third_party/flutter/engine/flutter/sky/tools/objcopy.py"
+ } else if (target_cpu == "arm" || target_cpu == "arm64") {
+ script = "//third_party/flutter/engine/flutter/sky/tools/objcopy.py"
+ }
+
+ args = [
+ "--objcopy",
+ rebase_path(objcopy_tool),
+ "--input",
+ rebase_path(invoker.input),
+ "--output",
+ rebase_path(invoker.output),
+ "--arch",
+ current_cpu,
+ ]
+
+ deps = []
+ if (defined(invoker.dep)) {
+ deps += [ invoker.dep ]
+ }
+
+ inputs = [ invoker.input ]
+ outputs = [ invoker.output ]
+ }
+
+ source_set(name) {
+ sources = [ invoker.output ]
+ deps = [ ":gen_js_obj_" + name ]
+ }
+}
diff --git a/dsoftbus/build/ohos/app/app.gni b/dsoftbus/build/ohos/app/app.gni
new file mode 100644
index 0000000000000000000000000000000000000000..cab20ee18e41e50ac7707591ba7d729787211d4f
--- /dev/null
+++ b/dsoftbus/build/ohos/app/app.gni
@@ -0,0 +1,318 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("//build/ohos/app/app_internal.gni")
+import("//build/ohos/notice/notice.gni")
+import("//build/ohos_var.gni")
+import("//build/templates/metadata/module_info.gni")
+
+# Declare an OHOS assets target
+#
+# Variables:
+# deps: dependencies of this target.
+# sources: list of asset files to be included in hap.
+#
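+# Example (paths are illustrative):
+#   ohos_assets("my_assets") {
+#     sources = [ "assets/images", "assets/fonts" ]
+#   }
+#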
+template("ohos_assets") {
+ forward_variables_from(invoker, [ "testonly" ])
+ assert(defined(invoker.sources), "sources must be defined for ohos_assets")
+ _metadata = "$target_gen_dir/$target_name.metadata"
+
+ _metadata_target = "${target_name}__metadata"
+ write_meta_data(_metadata_target) {
+ forward_variables_from(invoker, [ "deps" ])
+ type = "assets"
+ meta_data = _metadata
+ assets = invoker.sources
+ }
+ group(target_name) {
+ public_deps = [ ":$_metadata_target" ]
+ }
+}
+
+# Declare a js assets target
+#
+# Variables:
+#   source_dir: js assets directory; only a single directory is supported
+#     (a string, not a list).
+# deps: dependencies of this target.
+#
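+# Example (path is illustrative):
+#   ohos_js_assets("my_js_assets") {
+#     source_dir = "src/main/js/default"
+#   }
+#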
+template("ohos_js_assets") {
+ forward_variables_from(invoker, [ "testonly" ])
+ assert(defined(invoker.source_dir),
+ "source_dir must be defined for ohos_js_assets")
+
+ _metadata = "$target_gen_dir/$target_name.metadata"
+ _metadata_target = "${target_name}__metadata"
+ write_meta_data(_metadata_target) {
+ forward_variables_from(invoker,
+ [
+ "source_dir",
+ "deps",
+ ])
+ type = "js_assets"
+ meta_data = _metadata
+ assets = [ source_dir ]
+ }
+ group(target_name) {
+ public_deps = [ ":$_metadata_target" ]
+ }
+}
+
+# Declare an OHOS resource target
+#
+# Variables:
+# deps: dependencies of this target.
+# sources: list of resource files to be compiled.
+# hap_profile: hap_profile is used when building resources.
+#
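+# Example (paths are illustrative):
+#   ohos_resources("my_resources") {
+#     sources = [ "src/main/resources" ]
+#     hap_profile = "src/main/config.json"
+#   }
+#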
+template("ohos_resources") {
+ forward_variables_from(invoker, [ "testonly" ])
+ assert(defined(invoker.sources), "sources must be defined for ohos_resources")
+ _metadata = "$target_gen_dir/$target_name.metadata"
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps += invoker.deps
+ }
+ _metadata_target = "${target_name}__metadata"
+ write_meta_data(_metadata_target) {
+ forward_variables_from(invoker,
+ [
+ "package_name",
+ "hap_profile",
+ ])
+ type = "resources"
+ meta_data = _metadata
+ resources = invoker.sources
+ deps = _deps
+ }
+
+ _deps += [ ":$_metadata_target" ]
+
+ # compile_resources target here only used for resources.h generation
+ _compile_resources_target = "${target_name}__compile_resources"
+ _generated_header_dir = "$target_gen_dir/$target_name"
+ compile_resources(_compile_resources_target) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "package_name",
+ "hap_profile",
+ ])
+ meta_data = _metadata
+ generated_header_dir = _generated_header_dir
+ deps = _deps
+ }
+
+ _resources_config = "${target_name}__resources_headers"
+ config(_resources_config) {
+ include_dirs = [ _generated_header_dir ]
+ }
+
+ group(target_name) {
+ public_deps = [ ":$_metadata_target" ]
+ deps = [ ":$_compile_resources_target" ]
+ public_configs = [ ":$_resources_config" ]
+ }
+}
+
+# Declare an OHOS hap target
+#
+# Variables:
+# deps: dependencies of this target.
+# hap_name: name of output hap file.
+#   final_hap_path: full path of the output hap file; final_hap_path
+#     overrides hap_name.
+#   js2abc: whether to transform the js bundle to Ark bytecode; the default
+#     follows build_ark.
+#
+#
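+# Example (names and paths are illustrative):
+#   ohos_hap("my_hap") {
+#     hap_profile = "src/main/config.json"
+#     hap_name = "myapp"
+#     deps = [ ":my_js_assets", ":my_resources" ]
+#   }
+#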
+template("ohos_hap") {
+ forward_variables_from(invoker, [ "testonly" ])
+ assert(defined(invoker.hap_profile),
+ "hap_profile must be defined for ohos_hap")
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps = invoker.deps
+ }
+
+ _hap_profile = invoker.hap_profile
+ _hap_name = target_name
+ if (defined(invoker.hap_name)) {
+ _hap_name = invoker.hap_name
+ }
+
+ _hap_path = "$target_out_dir/$_hap_name.hap"
+ if (defined(invoker.final_hap_path)) {
+ _hap_path = invoker.final_hap_path
+ }
+
+ _js2abc = false
+ if (defined(invoker.js2abc)) {
+ _js2abc = invoker.js2abc
+ }
+
+ _js_build_mode = "release"
+ if (defined(invoker.js_build_mode)) {
+ _js_build_mode = invoker.js_build_mode
+ }
+
+ _main_target_name = target_name
+
+ if (defined(invoker.subsystem_name) && defined(invoker.part_name)) {
+ _subsystem_name = invoker.subsystem_name
+ _part_name = invoker.part_name
+ } else if (defined(invoker.part_name)) {
+ _part_name = invoker.part_name
+ _part_subsystem_info_file =
+ "$root_build_dir/build_configs/parts_info/part_subsystem.json"
+ _arguments = [
+ "--part-name",
+      _part_name,
+ "--part-subsystem-info-file",
+ rebase_path(_part_subsystem_info_file, root_build_dir),
+ ]
+ _get_subsystem_script = "//build/templates/common/get_subsystem_name.py"
+    _subsystem_name =
+        exec_script(_get_subsystem_script, _arguments, "trim string")
+ } else if (defined(invoker.subsystem_name)) {
+ _subsystem_name = invoker.subsystem_name
+ _part_name = _subsystem_name
+ } else {
+ _subsystem_name = "common"
+ _part_name = _subsystem_name
+ }
+
+ assert(_subsystem_name != "") # Mark as used
+ assert(_part_name != "") # Mark as used
+
+ # metadata is used to record all necessary data for hap.
+ _metadata_target = "${target_name}__metadata"
+ _meta_data = "$target_gen_dir/$target_name.metadata"
+ write_meta_data(_metadata_target) {
+ meta_data = _meta_data
+ possible_deps = _deps
+ type = "hap"
+ hap_path = _hap_path
+ }
+
+ _js_assets_target = "${target_name}__js_assets"
+ _packaged_js_assets = "$target_out_dir/$target_name/js_assets.zip"
+ compile_js_assets(_js_assets_target) {
+ hap_profile = _hap_profile
+ packaged_js_assets = _packaged_js_assets
+ meta_data = _meta_data
+ deps = [ ":$_metadata_target" ] + _deps
+ build_mode = _js_build_mode
+ js2abc = _js2abc
+ }
+
+ _resources_target = "${target_name}__compile_resources"
+ _packaged_resources = "$target_out_dir/$target_name/resources.zip"
+ _generated_header_dir = "$target_out_dir/$target_name"
+ compile_resources(_resources_target) {
+ hap_profile = _hap_profile
+ packaged_resources = _packaged_resources
+ generated_header_dir = _generated_header_dir
+ meta_data = _meta_data
+ deps = [ ":$_metadata_target" ] + _deps
+ }
+
+ _notice_target = "${target_name}__notice"
+ collect_notice(_notice_target) {
+ forward_variables_from(invoker,
+ [
+ "license_as_sources",
+ "license_file",
+ ])
+ module_type = "app"
+ module_name = _main_target_name
+ module_source_dir = get_label_info(":${_main_target_name}", "dir")
+ }
+
+ package_app(target_name) {
+ forward_variables_from(invoker,
+ [
+ "shared_libraries",
+ "private_key_path",
+ "signature_algorithm",
+ "certificate_profile",
+ "keystore_path",
+ "keystore_password",
+ "key_alias",
+ "certificate_file",
+ ])
+ deps = [
+ ":$_js_assets_target",
+ ":$_metadata_target",
+ ":$_notice_target",
+ ":$_resources_target",
+ ]
+ if (defined(shared_libraries)) {
+ deps += shared_libraries
+ }
+ packaged_js_assets = _packaged_js_assets
+ packaged_resources = _packaged_resources
+ hap_profile = _hap_profile
+ meta_data = _meta_data
+ hap_path = _hap_path
+
+ install_module_info = {
+ module_def = get_label_info(":$target_name", "label_with_toolchain")
+ module_info_file =
+ rebase_path(get_label_info(module_def, "target_out_dir"),
+ root_build_dir) + "/${target_name}_module_info.json"
+ subsystem_name = _subsystem_name
+ part_name = _part_name
+ toolchain = current_toolchain
+ toolchain_out_dir = rebase_path(root_out_dir, root_build_dir)
+ }
+ }
+
+ generate_module_info("${_main_target_name}_info") {
+ module_name = _main_target_name
+ module_type = "app"
+
+ module_source_dir = get_path_info(_hap_path, "dir")
+ module_install_name = get_path_info(_hap_path, "name")
+ if (defined(invoker.module_install_name)) {
+ module_install_name = invoker.module_install_name
+ }
+
+ module_install_images = [ "system" ]
+ if (defined(invoker.install_images)) {
+ module_install_images = []
+ module_install_images += invoker.install_images
+ }
+ install_enable = true
+ if (defined(invoker.install_enable)) {
+      install_enable = invoker.install_enable
+ }
+
+ module_output_extension = ".hap"
+
+ module_install_dir = "${module_type}"
+ if (defined(invoker.module_install_dir)) {
+ module_install_dir = invoker.module_install_dir
+ }
+
+ if (defined(invoker.relative_install_dir)) {
+ relative_install_dir = invoker.relative_install_dir
+ }
+
+ if (defined(invoker.symlink_target_name)) {
+ symlink_target_name = invoker.symlink_target_name
+ }
+ notice = "$target_out_dir/$_main_target_name.notice.txt"
+ }
+}
diff --git a/dsoftbus/build/ohos/app/app_internal.gni b/dsoftbus/build/ohos/app/app_internal.gni
new file mode 100644
index 0000000000000000000000000000000000000000..7171330e6fb12bb97f18d18cdc32ca99ac5682e6
--- /dev/null
+++ b/dsoftbus/build/ohos/app/app_internal.gni
@@ -0,0 +1,258 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("//build/ohos_var.gni")
+import("//build/templates/metadata/module_info.gni")
+
+template("compile_resources") {
+ forward_variables_from(invoker, [ "testonly" ])
+ assert(defined(invoker.hap_profile),
+ "hap_profile must be defined for compile_resources")
+ assert(
+ defined(invoker.packaged_resources) ||
+ defined(invoker.generated_header_dir),
+ "hap_profile or generated_header_dir must be defined for compile_resources")
+ _generate_resource = defined(invoker.packaged_resources)
+ if (_generate_resource) {
+ _packaged_resources = invoker.packaged_resources
+ }
+ _generate_header = defined(invoker.generated_header_dir)
+ if (_generate_header) {
+ _generated_header_file = invoker.generated_header_dir + "/ResourceTable.h"
+ }
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "meta_data",
+ "hap_profile",
+ "deps",
+ ])
+ script = "//build/scripts/compile_resources.py"
+ depfile = "$target_gen_dir/$target_name.d"
+
+ _rebased_metadata = rebase_path(meta_data, root_build_dir)
+ inputs = [
+ meta_data,
+ restool,
+ hap_profile,
+ ]
+ args = [
+ "--resources-dir=@FileArg($_rebased_metadata:root:resources)",
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--restool-path",
+ rebase_path(restool, root_build_dir),
+ "--hap-profile",
+ rebase_path(hap_profile, root_build_dir),
+ ]
+ outputs = []
+ if (_generate_resource) {
+ outputs += [ _packaged_resources ]
+ args += [
+ "--output-resources-zipfile",
+ rebase_path(_packaged_resources, root_build_dir),
+ ]
+ }
+ if (_generate_header) {
+ outputs += [ _generated_header_file ]
+ args += [
+ "--output-header-file",
+ rebase_path(_generated_header_file, root_build_dir),
+ ]
+ }
+ }
+}
+
+template("package_app") {
+ forward_variables_from(invoker, [ "testonly" ])
+ assert(defined(invoker.hap_profile),
+ "hap_profile must be defined for package_app")
+ assert(defined(invoker.hap_path), "hap_path must be defined for package_app")
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "shared_libraries",
+ "hap_profile",
+ "hap_path",
+ "packaged_resources",
+ "packaged_js_assets",
+ "meta_data",
+ "private_key_path",
+ "signature_algorithm",
+ "certificate_profile",
+ "keystore_path",
+ "keystore_password",
+ "key_alias",
+ "certificate_file",
+ "install_module_info",
+ ])
+ script = "//build/scripts/hapbuilder.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ _rebased_metadata = rebase_path(meta_data, root_build_dir)
+ outputs = [ hap_path ]
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--hap-profile",
+ rebase_path(hap_profile, root_build_dir),
+ "--hapsigner",
+ rebase_path(hapsigner, root_build_dir),
+ "--hap-packing-tool",
+ rebase_path(hap_packing_tool, root_build_dir),
+ "--hap-path",
+ rebase_path(hap_path, root_build_dir),
+ "--packaged-resources",
+ rebase_path(packaged_resources, root_build_dir),
+ "--packaged-js-assets",
+ rebase_path(packaged_js_assets, root_build_dir),
+ "--assets=@FileArg($_rebased_metadata:root:assets)",
+ ]
+
+ inputs = [
+ hap_profile,
+ hapsigner,
+ hap_packing_tool,
+ packaged_js_assets,
+ packaged_resources,
+ meta_data,
+ ]
+
+ _private_key_path = default_hap_private_key_path
+ if (defined(private_key_path)) {
+ _private_key_path = private_key_path
+ }
+ _signature_algorithm = default_signature_algorithm
+ if (defined(signature_algorithm)) {
+ _signature_algorithm = signature_algorithm
+ }
+ _key_alias = default_key_alias
+ if (defined(key_alias)) {
+ _key_alias = key_alias
+ }
+ _keystore_path = default_keystore_path
+ if (defined(keystore_path)) {
+ _keystore_path = keystore_path
+ }
+ _keystore_password = default_keystore_password
+ if (defined(keystore_password)) {
+ _keystore_password = keystore_password
+ }
+ _certificate_file = default_hap_certificate_file
+ if (defined(certificate_file)) {
+ _certificate_file = certificate_file
+ }
+
+ inputs += [
+ certificate_profile,
+ _keystore_path,
+ _certificate_file,
+ ]
+
+ args += [
+ "--private-key-path",
+ _private_key_path,
+ "--sign-algo",
+ _signature_algorithm,
+ "--certificate-profile",
+ rebase_path(certificate_profile, root_build_dir),
+ "--keyalias",
+ _key_alias,
+ "--keystore-path",
+ rebase_path(_keystore_path, root_build_dir),
+ "--keystorepasswd",
+ _keystore_password,
+ "--certificate-file",
+ rebase_path(_certificate_file, root_build_dir),
+ ]
+
+ if (defined(shared_libraries)) {
+ foreach(lib, shared_libraries) {
+ _module_info = get_label_info(lib, "target_out_dir") + "/" +
+ get_label_info(lib, "name") + "_module_info.json"
+ _rebased_module_info = rebase_path(_module_info, root_build_dir)
+ args += [ "--dso=@FileArg($_rebased_module_info:source)" ]
+ }
+ }
+
+ if (defined(install_module_info)) {
+ metadata = {
+ install_modules = [ install_module_info ]
+ }
+ }
+ }
+}
+
+template("compile_js_assets") {
+ forward_variables_from(invoker, [ "testonly" ])
+ assert(defined(invoker.meta_data) && defined(invoker.packaged_js_assets),
+ "meta_data and packaged_js_assets must be for compile_js_assets")
+
+ _packaged_js_assets = invoker.packaged_js_assets
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "meta_data",
+ "hap_profile",
+ "build_mode",
+ "js2abc",
+ ])
+ script = "//build/scripts/build_js_assets.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ outputs = [ _packaged_js_assets ]
+
+ _rebased_metadata = rebase_path(meta_data, root_build_dir)
+
+ inputs = [
+ meta_data,
+ nodejs,
+ webpack_js,
+ webpack_config_js,
+ ]
+ args = [
+ "--js-assets-dir=@FileArg($_rebased_metadata:root:js_assets)",
+ "--output",
+ rebase_path(_packaged_js_assets, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--nodejs-path",
+ rebase_path(nodejs, root_build_dir),
+ "--webpack-js",
+ rebase_path(webpack_js, root_build_dir),
+ "--webpack-config-js",
+ rebase_path(webpack_config_js, root_build_dir),
+ "--build-mode",
+ build_mode,
+ ]
+ if (js2abc) {
+ args += [ "--js2abc" ]
+ }
+ if (defined(hap_profile)) {
+ args += [
+ "--hap-profile",
+ rebase_path(hap_profile, root_build_dir),
+ ]
+ inputs += [ hap_profile ]
+ }
+ }
+}
+
+# Placeholder: js2ark is not implemented yet; the group only forwards deps.
+template("js2ark") {
+ group(target_name) {
+ not_needed(invoker, "*", [ "deps" ])
+ forward_variables_from(invoker, [ "deps" ])
+ }
+}
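
The templates in app_internal.gni lean on an `@FileArg(file:key1:key2)` argument convention: a placeholder that the Python action scripts expand by reading a JSON file and walking nested keys. Below is a minimal sketch of that expansion; the real implementation lives in scripts/util/build_utils.py, which is not part of this hunk, so the names and details here are illustrative only.

```python
# Illustrative-only sketch of expanding "@FileArg(file:key1:key2)"
# references; the real logic lives in scripts/util/build_utils.py.
import json
import re

_FILE_ARG_RE = re.compile(r'@FileArg\(([^)]+)\)')

def expand_file_arg(arg):
    match = _FILE_ARG_RE.search(arg)
    if not match:
        return arg
    path, *keys = match.group(1).split(':')
    with open(path) as f:
        value = json.load(f)
    for key in keys:  # e.g. walk root -> resources in the metadata JSON
        value = value[key]
    return arg[:match.start()] + str(value) + arg[match.end():]

# expand_file_arg('--resources-dir=@FileArg(meta.json:root:resources)')
# would read meta.json and splice in its ["root"]["resources"] value.
```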
diff --git a/dsoftbus/build/ohos/build_var.gni b/dsoftbus/build/ohos/build_var.gni
new file mode 100644
index 0000000000000000000000000000000000000000..1e383c4ee74f514dd39470f6363a5e3cfa034f05
--- /dev/null
+++ b/dsoftbus/build/ohos/build_var.gni
@@ -0,0 +1,21 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+build_configs_path = "${root_build_dir}/build_configs"
+
+product_output_dir = "$root_build_dir/packages"
+
+parts_variants_file = "${build_configs_path}/parts_variants.json"
+
+variants_toolchain_file =
+ "${build_configs_path}/platforms_info/toolchain_to_variant.json"
diff --git a/dsoftbus/build/ohos/common/BUILD.gn b/dsoftbus/build/ohos/common/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..7d6d2a314be71a995f42dd1c88579432bdeb8144
--- /dev/null
+++ b/dsoftbus/build/ohos/common/BUILD.gn
@@ -0,0 +1,84 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("${root_build_dir}/build_configs/parts_list.gni")
+
+# build target type
+target_type = "${target_os}-${target_cpu}"
+dist_dir_name = "component_dist/${target_type}"
+
+# dist_build_out_dir = "${root_build_dir}/component_dist/${target_type}"
+dist_subsystem_info_filename = "dist_parts_info.json"
+
+# install packages archive dir define
+dist_install_packages_dir = "${dist_dir_name}/packages_to_install"
+dist_subsystem_info_file =
+ "//${dist_install_packages_dir}/${dist_subsystem_info_filename}"
+
+src_installed_info_file = "${root_build_dir}/src_installed_parts.json"
+binary_installed_info_file = "${root_build_dir}/binary_installed_parts.json"
+
+all_subsystem_info_file = "${root_build_dir}/all_parts_info.json"
+
+generated_file("generate_src_installed_info") {
+ deps = []
+ foreach(part_label, parts_list) {
+ deps += [ get_label_info(part_label, "label_with_toolchain") ]
+ }
+ outputs = [ src_installed_info_file ]
+ data_keys = [ "part_installed_info" ]
+ output_conversion = "json"
+}
+
+action_with_pydeps("gen_binary_installed_info") {
+ deps = [ ":generate_src_installed_info" ]
+ script = "//build/ohos/common/binary_install_info.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ inputs = [ src_installed_info_file ]
+ outputs = [ binary_installed_info_file ]
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--dist-parts-info-file",
+ rebase_path(dist_subsystem_info_file, root_build_dir),
+ "--parts-src-installed-info-file",
+ rebase_path(src_installed_info_file, root_build_dir),
+ "--binary-installed-info-file",
+ rebase_path(binary_installed_info_file, root_build_dir),
+ ]
+}
+
+action_with_pydeps("merge_all_parts") {
+ script = "//build/ohos/common/merge_all_subsystem.py"
+ deps = [
+ ":gen_binary_installed_info",
+ ":generate_src_installed_info",
+ ]
+ sources = [
+ binary_installed_info_file,
+ src_installed_info_file,
+ ]
+ outputs = [ all_subsystem_info_file ]
+ depfile = "$target_gen_dir/$target_name.d"
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--src-install-info-file",
+ rebase_path(src_installed_info_file, root_build_dir),
+ "--binary-install-info-file",
+ rebase_path(binary_installed_info_file, root_build_dir),
+ "--all-subsystem-info-file",
+ rebase_path(all_subsystem_info_file, root_build_dir),
+ ]
+}
diff --git a/dsoftbus/build/ohos/common/binary_install_info.py b/dsoftbus/build/ohos/common/binary_install_info.py
new file mode 100755
index 0000000000000000000000000000000000000000..e3ae623dcbcd2912cfcf8181bc928c4d113d1f95
--- /dev/null
+++ b/dsoftbus/build/ohos/common/binary_install_info.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+
+def binary_install_subsystem(dist_parts_info_file,
+ parts_src_installed_info_file):
+ # These subsystems have binary install packages.
+ if not os.path.exists(dist_parts_info_file):
+ print("dist subsystem info file [{}] no exist.".format(
+ dist_parts_info_file))
+ return []
+
+ dist_parts_info = read_json_file(dist_parts_info_file)
+ if dist_parts_info is None:
+ raise Exception("read install parts info file failed.")
+
+ parts_src_list = read_json_file(parts_src_installed_info_file)
+ if parts_src_list is None:
+ raise Exception("read subsystem src flag file failed.")
+ src_part_name_list = []
+ for _part_info in parts_src_list:
+ src_part_name_list.append(_part_info.get('part_name'))
+
+ required_subsystem = []
+ for info in dist_parts_info:
+ part_name = info.get('part_name')
+ if part_name not in src_part_name_list:
+ required_subsystem += [info]
+ return required_subsystem
+
+
+def output_installed_info(binary_subsystem_info, required_parts):
+ write_json_file(binary_subsystem_info, required_parts)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--dist-parts-info-file', required=True)
+ parser.add_argument('--parts-src-installed-info-file', required=True)
+ parser.add_argument('--binary-installed-info-file', required=True)
+ parser.add_argument('--depfile', required=False)
+ args = parser.parse_args()
+
+ required_parts = binary_install_subsystem(
+ args.dist_parts_info_file, args.parts_src_installed_info_file)
+
+ output_installed_info(args.binary_installed_info_file, required_parts)
+
+ if args.depfile:
+ _dep_files = []
+ _dep_files.append(args.dist_parts_info_file)
+ _dep_files.append(args.parts_src_installed_info_file)
+ build_utils.write_depfile(args.depfile,
+ args.binary_installed_info_file,
+ _dep_files,
+ add_pydeps=False)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
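
The heart of binary_install_subsystem() is a simple set difference: a dist part is kept for binary installation only when no part of the same name was built from source. A standalone illustration with made-up data:

```python
# Made-up data illustrating the filter in binary_install_subsystem().
dist_parts = [{'part_name': 'hilog'}, {'part_name': 'sensors'}]
src_parts = [{'part_name': 'hilog'}]

src_names = {p['part_name'] for p in src_parts}
required = [p for p in dist_parts if p['part_name'] not in src_names]
print(required)  # [{'part_name': 'sensors'}] -- hilog is built from source
```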
diff --git a/dsoftbus/build/ohos/common/binary_install_info.pydeps b/dsoftbus/build/ohos/common/binary_install_info.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..c7f67040caccf9025db421e39c89701ab783b692
--- /dev/null
+++ b/dsoftbus/build/ohos/common/binary_install_info.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/common --output build/ohos/common/binary_install_info.pydeps build/ohos/common/binary_install_info.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+binary_install_info.py
diff --git a/dsoftbus/build/ohos/common/merge_all_subsystem.py b/dsoftbus/build/ohos/common/merge_all_subsystem.py
new file mode 100755
index 0000000000000000000000000000000000000000..480c23695afe2e355ed29a1c75ec6ec0a23a0f2e
--- /dev/null
+++ b/dsoftbus/build/ohos/common/merge_all_subsystem.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+
+def merge_subsystem(src_install_info_file, binary_install_info_file):
+ src_install_info = read_json_file(src_install_info_file)
+ if src_install_info is None:
+ raise Exception("read src install info file '{}' failed.".format(
+ src_install_info_file))
+
+ binary_install_info = None
+ if os.path.exists(binary_install_info_file):
+ binary_install_info = read_json_file(binary_install_info_file)
+ if binary_install_info is None:
+ raise Exception("read binary install file '{}' failed.".format(
+ binary_install_info_file))
+ else:
+ print("binary install info file '{}' no exist.".format(
+ binary_install_info_file))
+
+ merge_result = {}
+ for _subsystem_info in src_install_info:
+ part_name = _subsystem_info.get('part_name')
+ _subsystem_info['is_source'] = True
+ merge_result[part_name] = _subsystem_info
+ if binary_install_info:
+ for _subsystem_info in binary_install_info:
+ part_name = _subsystem_info.get('part_name')
+ _subsystem_info['is_source'] = False
+ merge_result[part_name] = _subsystem_info
+ return merge_result
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--src-install-info-file', required=True)
+ parser.add_argument('--binary-install-info-file', required=True)
+ parser.add_argument('--all-subsystem-info-file', required=True)
+ parser.add_argument('--depfile', required=False)
+ args = parser.parse_args()
+
+ all_subsystem_info = merge_subsystem(args.src_install_info_file,
+ args.binary_install_info_file)
+ write_json_file(args.all_subsystem_info_file, all_subsystem_info)
+
+ if args.depfile:
+ _dep_files = []
+ _dep_files.append(args.src_install_info_file)
+ _dep_files.append(args.binary_install_info_file)
+ build_utils.write_depfile(args.depfile,
+ args.all_subsystem_info_file,
+ _dep_files,
+ add_pydeps=False)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
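
merge_subsystem() keys the result by part_name and applies the binary-installed list second, so a binary entry overrides a source entry of the same name. A quick demonstration with made-up data:

```python
# Made-up data showing the override order in merge_subsystem().
src = [{'part_name': 'hilog'}, {'part_name': 'hitrace'}]
binary = [{'part_name': 'hitrace'}]

merged = {}
for info in src:
    merged[info['part_name']] = dict(info, is_source=True)
for info in binary:
    merged[info['part_name']] = dict(info, is_source=False)
print(merged['hitrace']['is_source'])  # False: the binary entry wins
```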
diff --git a/dsoftbus/build/ohos/common/merge_all_subsystem.pydeps b/dsoftbus/build/ohos/common/merge_all_subsystem.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..72251c46ce3c4fe9703b248b28d908e9ed19ae73
--- /dev/null
+++ b/dsoftbus/build/ohos/common/merge_all_subsystem.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/common --output build/ohos/common/merge_all_subsystem.pydeps build/ohos/common/merge_all_subsystem.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+merge_all_subsystem.py
diff --git a/dsoftbus/build/ohos/copy_files.py b/dsoftbus/build/ohos/copy_files.py
new file mode 100755
index 0000000000000000000000000000000000000000..dacc490ff42131e0f0ddb206eec4dc8a45150543
--- /dev/null
+++ b/dsoftbus/build/ohos/copy_files.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+import shutil
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from scripts.util.file_utils import write_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+
+def copy_files(source_files, output_dir):
+ output_files = []
+ for src_file in source_files:
+ if not os.path.exists(src_file):
+ raise Exception("src file '{}' doesn't exist.".format(src_file))
+ if not os.path.isfile(src_file):
+ continue
+ file_name = os.path.basename(src_file)
+ output_file = os.path.join(output_dir, file_name)
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir, exist_ok=True)
+ shutil.copy2(src_file, output_file)
+ output_files.append(output_file)
+ return output_files
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--source-files', nargs='+', required=True)
+ parser.add_argument('--copy-output-dir', required=True)
+ parser.add_argument('--outfile', required=True)
+ parser.add_argument('--depfile', required=False)
+ args = parser.parse_args()
+
+ copy_out_list = copy_files(args.source_files, args.copy_output_dir)
+ write_file(args.outfile, '\n'.join(copy_out_list))
+
+ if args.depfile:
+ _dep_files = []
+ _dep_files.extend(args.source_files)
+ _dep_files.extend(copy_out_list)
+ _dep_files.sort()
+ build_utils.write_depfile(args.depfile,
+ args.outfile,
+ _dep_files,
+ add_pydeps=False)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
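
Note that copy_files() copies flat: only the basename of each source survives, so two sources sharing a basename would overwrite each other in the output directory. A hypothetical call, run in the context of this module:

```python
# Hypothetical usage of copy_files() above; paths are examples only.
import os
import tempfile

src_dir = tempfile.mkdtemp()
open(os.path.join(src_dir, 'a.json'), 'w').close()
out = copy_files([os.path.join(src_dir, 'a.json')], tempfile.mkdtemp())
print(out)  # ['/tmp/<random>/a.json']
```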
diff --git a/dsoftbus/build/ohos/copy_files.pydeps b/dsoftbus/build/ohos/copy_files.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..cee18ec2e36fa9ad8b2312ffdf4eb3c347286a8c
--- /dev/null
+++ b/dsoftbus/build/ohos/copy_files.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos --output build/ohos/copy_files.pydeps build/ohos/copy_files.py
+../gn_helpers.py
+../scripts/__init__.py
+../scripts/util/__init__.py
+../scripts/util/build_utils.py
+../scripts/util/file_utils.py
+../scripts/util/md5_check.py
+../scripts/util/pycache.py
+copy_files.py
diff --git a/dsoftbus/build/ohos/ebpf.gni b/dsoftbus/build/ohos/ebpf.gni
new file mode 100644
index 0000000000000000000000000000000000000000..4ecdf42c622793c8160ee501809e321a82f534de
--- /dev/null
+++ b/dsoftbus/build/ohos/ebpf.gni
@@ -0,0 +1,65 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Defines a template for collecting ebpf testcases.
+#
+# The collect_ebpf_testcase template collects the ebpf testcases of a part
+# into out/xxx/ebpf_testcase and records them in a configuration file.
+# Parameters
+#   ebpf_testcase: the ebpf testcases of the subsystem
+#   subsystem_name
+#   part_name
+#
+template("collect_ebpf_testcase") {
+ assert(defined(invoker.ebpf_testcase))
+ assert(defined(invoker.subsystem_name))
+ assert(defined(invoker.part_name))
+
+ forward_variables_from(invoker,
+ [
+ "ebpf_testcase",
+ "subsystem_name",
+ "part_name",
+ ])
+
+ deps = []
+ subsystem_testcase_collect_path =
+ "${root_out_dir}/ebpf_testcase/${subsystem_name}/${part_name}"
+ subsystem_testcase_config_file = "${subsystem_testcase_collect_path}/${target_name}_ebpf_testcase_config.json"
+ copy("${target_name}_copy_testcase") {
+ sources = []
+ sources += ebpf_testcase
+ outputs = [ "${subsystem_testcase_collect_path}/{{source_file_part}}" ]
+ }
+
+ src_testcase_list = []
+ foreach(testcase, ebpf_testcase) {
+ src_testcase_list += [ rebase_path(testcase, root_build_dir) ]
+ }
+
+ action("${target_name}") {
+ deps += [ ":${target_name}_copy_testcase" ]
+ script = "//build/gen_subsystem_ebpf_testcase_config.py"
+ sources = ebpf_testcase
+ outputs = [ subsystem_testcase_config_file ]
+ args = [
+ "--subsystem-name",
+ subsystem_name,
+ "--subsystem-ebpf-testcase-config-file",
+ rebase_path(subsystem_testcase_config_file, root_build_dir),
+ ]
+ args += [ "--subsystem-testcase-list" ]
+ args += src_testcase_list
+ args += [ "--subsystem-testcase-collect-path" ]
+ args += [ rebase_path(subsystem_testcase_collect_path, root_build_dir) ]
+ }
+}
diff --git a/dsoftbus/build/ohos/file_exists.py b/dsoftbus/build/ohos/file_exists.py
new file mode 100755
index 0000000000000000000000000000000000000000..30623aaadf2d200a441d89713e4b92f451bcf476
--- /dev/null
+++ b/dsoftbus/build/ohos/file_exists.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+import os
+
+
+def is_exists(filename, check_type):
+ # 'check_type' avoids shadowing the builtin 'type'; os.path.isfile and
+ # os.path.isdir already imply existence.
+ result = False
+ if check_type == 'file':
+ result = os.path.isfile(filename)
+ elif check_type == 'dir':
+ result = os.path.isdir(filename)
+ return result
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--filename', required=True)
+ parser.add_argument('--type', default='file')
+ args = parser.parse_args()
+ result = is_exists(args.filename, args.type)
+ sys.stdout.write(str(result))
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
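
file_exists.py writes the literal string "True" or "False" to stdout (no trailing newline), which makes it suitable for callers such as GN's exec_script(). A sketch of how a caller might consume it; the paths are hypothetical:

```python
# Hypothetical consumer of file_exists.py; file paths are examples only.
import subprocess

out = subprocess.run(
    ['python3', 'build/ohos/file_exists.py', '--filename', 'BUILD.gn'],
    capture_output=True, text=True, check=True).stdout
exists = out.strip() == 'True'
print(exists)
```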
diff --git a/dsoftbus/build/ohos/generate_part_info.py b/dsoftbus/build/ohos/generate_part_info.py
new file mode 100755
index 0000000000000000000000000000000000000000..73d7c595624f7f5c07586a047360c64ec9488c6c
--- /dev/null
+++ b/dsoftbus/build/ohos/generate_part_info.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+
+
+# Read the subsystem modules and generate the part's install list and deps list.
+def gen_output_file(part_name, origin_part_name, all_modules_file,
+ sdk_modules_info_file, install_modules_file,
+ dep_modules_file, current_toolchain):
+ # read subsystem module info
+ all_module_info = read_json_file(all_modules_file)
+ if all_module_info is None:
+ raise Exception(
+ "read part '{}' modules info failed.".format(part_name))
+
+ # merge sdk install modules; entries may be repeated
+ if os.path.exists(sdk_modules_info_file):
+ sdk_modules_info = read_json_file(sdk_modules_info_file)
+ if sdk_modules_info is not None:
+ all_module_info.extend(sdk_modules_info)
+
+ # Generate a list of modules by part
+ modules_info_dict = {}
+ modules_def = {} # remove duplicates
+ for info in all_module_info:
+ module_def = info.get('module_def')
+ if module_def in modules_def:
+ continue
+
+ modules_def[module_def] = ''
+ _module_part_name = info.get('part_name')
+ if _module_part_name not in modules_info_dict:
+ modules_info_dict[_module_part_name] = []
+ modules_info_dict[_module_part_name] += [info]
+
+ # Current part module list
+ part_module_list = []
+ if origin_part_name in modules_info_dict:
+ part_module_list = modules_info_dict.pop(origin_part_name)
+
+ # Current part dependent module list
+ part_install_modules = []
+ part_no_install_modules = []
+ for install_module in part_module_list:
+ toolchain = install_module.get('toolchain')
+ if toolchain == '' or toolchain == current_toolchain:
+ part_install_modules.append(install_module)
+ else:
+ part_no_install_modules.append(install_module)
+
+ # write install modules file
+ write_json_file(install_modules_file, part_install_modules)
+
+ # add non-installed modules to the dict, e.g. host-toolchain targets
+ modules_info_dict[part_name] = part_no_install_modules
+ # write dep modules file
+ write_json_file(dep_modules_file, modules_info_dict)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--part-name', required=True)
+ parser.add_argument('--origin-part-name', required=True)
+ parser.add_argument('--input-file', help='', required=True)
+ parser.add_argument('--sdk-modules-info-file', help='', required=True)
+ parser.add_argument('--output-install-file', help='', required=True)
+ parser.add_argument('--output-deps-file', help='', required=True)
+ parser.add_argument('--current-toolchain', help='', required=True)
+ parser.add_argument('--depfile', required=False)
+ args = parser.parse_args()
+
+ gen_output_file(args.part_name, args.origin_part_name, args.input_file,
+ args.sdk_modules_info_file, args.output_install_file,
+ args.output_deps_file, args.current_toolchain)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
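
The key step in gen_output_file() is splitting a part's modules by toolchain: modules built with the current toolchain (or with none recorded) are installed, while the rest, typically host-toolchain builds, are kept only as dependencies. A standalone illustration with made-up toolchain labels:

```python
# Made-up data illustrating the toolchain split in gen_output_file().
current = '//build/toolchain:ohos_arm'
modules = [
    {'module_def': 'a', 'toolchain': ''},          # no toolchain recorded
    {'module_def': 'b', 'toolchain': current},     # current toolchain
    {'module_def': 'c', 'toolchain': '//build/toolchain:host_x64'},
]
install = [m for m in modules if m['toolchain'] in ('', current)]
deps_only = [m for m in modules if m['toolchain'] not in ('', current)]
print([m['module_def'] for m in install])    # ['a', 'b']
print([m['module_def'] for m in deps_only])  # ['c']
```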
diff --git a/dsoftbus/build/ohos/generate_part_info.pydeps b/dsoftbus/build/ohos/generate_part_info.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..ff5839e8519dbc6ee1490a1e8fde120fbad7c4b8
--- /dev/null
+++ b/dsoftbus/build/ohos/generate_part_info.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos --output build/ohos/generate_part_info.pydeps build/ohos/generate_part_info.py
+../scripts/__init__.py
+../scripts/util/__init__.py
+../scripts/util/file_utils.py
+generate_part_info.py
diff --git a/dsoftbus/build/ohos/images/BUILD.gn b/dsoftbus/build/ohos/images/BUILD.gn
new file mode 100644
index 0000000000000000000000000000000000000000..16e6b145ab665ef00d13a4ee95948d1cd1a60ee6
--- /dev/null
+++ b/dsoftbus/build/ohos/images/BUILD.gn
@@ -0,0 +1,87 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("//build/ohos.gni")
+import("//build/ohos/build_var.gni")
+
+# import target_platform_list
+import("${build_configs_path}/platforms_list.gni")
+group("make_images") {
+ deps = []
+ foreach(_platform, target_platform_list) {
+ deps += [
+ ":${_platform}_system_image",
+ ":${_platform}_updater_image",
+ ":${_platform}_userdata_image",
+ ":${_platform}_vendor_image",
+ ]
+ }
+}
+
+build_image_tools_path = "//third_party/e2fsprogs/prebuilt/host/bin"
+
+foreach(_platform, target_platform_list) {
+ current_platform = _platform
+ current_platform_dir = "${product_output_dir}/$current_platform"
+
+ system_module_info_list = "${current_platform_dir}/system_module_info.json"
+ system_modules_list = "${current_platform_dir}/system_modules_list.txt"
+
+ image_list = [
+ "system",
+ "vendor",
+ "userdata",
+ "updater",
+ ]
+ foreach(_image_name, image_list) {
+ action_with_pydeps("${_platform}_${_image_name}_image") {
+ script = "//build/ohos/images/build_image.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ deps = [ "//build/ohos/packages:${_platform}_install_modules" ]
+
+ image_config_file =
+ "//build/ohos/images/mkimage/${_image_name}_image_conf.txt"
+ output_image_file = "$current_platform_dir/images/${_image_name}.img"
+
+ image_input_path = "$current_platform_dir/${_image_name}"
+ if (_image_name == "userdata") {
+ image_input_path = "$current_platform_dir/data"
+ }
+
+ sources = [
+ image_config_file,
+ system_module_info_list,
+ system_modules_list,
+ ]
+ outputs = [ output_image_file ]
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--image-name",
+ _image_name,
+ "--input-path",
+ rebase_path(image_input_path, root_build_dir),
+ "--image-config-file",
+ rebase_path(image_config_file, root_build_dir),
+ "--output-image",
+ rebase_path(output_image_file, root_build_dir),
+ "--build-image-tools-path",
+ rebase_path(build_image_tools_path, root_build_dir),
+ ]
+ if (sparse_image) {
+ args += [ "--sparse-image" ]
+ }
+ }
+ }
+}
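
The two nested foreach loops above fan out into one action per platform/image pair. A quick enumeration of the target names they generate, with a hypothetical platform list:

```python
# Enumerating the image targets the GN loops above create; the platform
# list is hypothetical.
platforms = ['phone']
images = ['system', 'vendor', 'userdata', 'updater']
targets = ['{}_{}_image'.format(p, i) for p in platforms for i in images]
print(targets)
# ['phone_system_image', 'phone_vendor_image',
#  'phone_userdata_image', 'phone_updater_image']
```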
diff --git a/dsoftbus/build/ohos/images/build_image.py b/dsoftbus/build/ohos/images/build_image.py
new file mode 100755
index 0000000000000000000000000000000000000000..fc06024d892bcd6e9a0db48365d63fb5cdba1773
--- /dev/null
+++ b/dsoftbus/build/ohos/images/build_image.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import shutil
+import argparse
+from mkimage import mkimages
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util import build_utils # noqa: E402
+
+
+def _prepare_userdata(userdata_path):
+ if os.path.exists(userdata_path):
+ shutil.rmtree(userdata_path)
+ os.makedirs(userdata_path)
+
+
+def _prepare_root(system_path):
+ root_dir = os.path.join(os.path.dirname(system_path), 'root')
+ if os.path.exists(root_dir):
+ shutil.rmtree(root_dir)
+ os.makedirs(root_dir, exist_ok=True)
+ _dir_list = [
+ 'config', 'dev', 'proc', 'sys', 'updater', 'system', 'vendor', 'data'
+ ]
+ for _dir_name in _dir_list:
+ os.makedirs(os.path.join(root_dir, _dir_name), exist_ok=True)
+ os.symlink('/system/bin', os.path.join(root_dir, 'bin'))
+ os.symlink('/system/bin/init', os.path.join(root_dir, 'init'))
+ os.symlink('/system/etc', os.path.join(root_dir, 'etc'))
+
+
+def _prepare_updater(updater_path):
+ _dir_list = ['dev', 'proc', 'sys']
+ for _dir_name in _dir_list:
+ _path = os.path.join(updater_path, _dir_name)
+ if os.path.exists(_path):
+ continue
+ os.makedirs(_path, exist_ok=True)
+ os.symlink('/bin/init', os.path.join(updater_path, 'init'))
+
+
+def _make_image(args):
+ if args.image_name == 'system':
+ _prepare_root(args.input_path)
+ elif args.image_name == 'updater':
+ _prepare_updater(args.input_path)
+ image_type = "raw"
+ if args.sparse_image:
+ image_type = "sparse"
+ mk_image_args = [
+ args.input_path, args.image_config_file, args.output_image_path,
+ image_type
+ ]
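+ # The relative path below appears to assume the script is invoked from
+ # the build output directory (GN runs actions there); the mkimage dir
+ # and any prebuilt image tools are prepended to PATH for mk_images().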
+ env_path = "../../build/ohos/images/mkimage"
+ if args.build_image_tools_path:
+ env_path = '{}:{}'.format(env_path, args.build_image_tools_path)
+ os.environ['PATH'] = '{}:{}'.format(env_path, os.environ.get('PATH'))
+ mkimages.mk_images(mk_image_args)
+
+
+def main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--depfile', required=True)
+ parser.add_argument('--image-name', required=True)
+ parser.add_argument('--image-config-file', required=True)
+ parser.add_argument('--input-path', required=True)
+ parser.add_argument('--output-image-path', required=True)
+ parser.add_argument('--sparse-image',
+ dest="sparse_image",
+ action='store_true')
+ parser.set_defaults(sparse_image=False)
+ parser.add_argument('--build-image-tools-path', required=False)
+ args = parser.parse_args(argv)
+
+ if os.path.exists(args.output_image_path):
+ os.remove(args.output_image_path)
+ if args.image_name == 'userdata':
+ _prepare_userdata(args.input_path)
+ if os.path.isdir(args.input_path):
+ _make_image(args)
+ _dep_files = []
+ for _root, _, _files in os.walk(args.input_path):
+ for _file in _files:
+ _dep_files.append(os.path.join(_root, _file))
+ build_utils.write_depfile(args.depfile,
+ args.output_image_path,
+ _dep_files,
+ add_pydeps=False)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/ohos/images/build_image.pydeps b/dsoftbus/build/ohos/images/build_image.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..812cdb2c36e257992938b744851443630fe579fd
--- /dev/null
+++ b/dsoftbus/build/ohos/images/build_image.pydeps
@@ -0,0 +1,3 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/images --output build/ohos/images/build_image.pydeps build/ohos/images/build_image.py
+build_image.py
diff --git a/dsoftbus/build/ohos/images/mkimage/README.txt b/dsoftbus/build/ohos/images/mkimage/README.txt
new file mode 100644
index 0000000000000000000000000000000000000000..a40b9336fe78688b110d41df00a27a12c6ab1dad
--- /dev/null
+++ b/dsoftbus/build/ohos/images/mkimage/README.txt
@@ -0,0 +1,54 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file describes the parameters used by the image-packing scripts and
+# their format. Lines whose first character is '#' are comments and are
+# ignored when the packing scripts scan the file.
+# Packing is driven by mkimages.py, which takes four arguments: the source
+# path to pack, the parameter file, the output image path, and the image
+# type.
+# This document explains how to configure that parameter file.
+
+# The first line is always the mount point; the order of the required
+# parameters must stay consistent.
+/vendor
+# The second line is always the size of the image to create.
+268434944
+# Filesystem type, mandatory. Currently only ext4 is supported; f2fs
+# support will be added later.
+--fs_type=ext4
+# Optional parameters follow.
+
+# ext4 filesystem parameters
+# mke2fs options
+# inode size; defaults to 256 if not configured
+--inode_size 256
+# journal_size configures the journal; if not configured, the journal is
+# disabled by default (the data partition is an exception)
+--journal_size xxx
+# Reserved-space percentage of the image; defaults to 0 if not configured
+# (the data partition is an exception and uses the mke2fs default)
+--reserve_percent xxx
+# extend_opts may be followed by several options, e.g. discard
+--extend_opts discard
+
+# e2fsdroid options
+# dac_config is the path of the DAC configuration
+--dac_config path
+
+# f2fs filesystem parameters
+# make_f2fs options
+# label; if not set, the mount point is used as the label
+--label xxx
+# prjquota enable switch
+--prjquota
+# casefold enable switch
+--casefold
+
+# sload_f2fs options
+# dac_config is the path of the DAC configuration
+--dac_config path
+# timestamp; not used by default
+--timestamp xxx
diff --git a/dsoftbus/build/ohos/images/mkimage/dac.txt b/dsoftbus/build/ohos/images/mkimage/dac.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e45dcd7a8a969c3f9317597cda5e6a84aa8b9792
--- /dev/null
+++ b/dsoftbus/build/ohos/images/mkimage/dac.txt
@@ -0,0 +1,25 @@
+# path, mode(oct), uid, gid, cap
+# Path matching supports the forms data, data/* and data/1.txt; explicitly
+# declared paths are matched first, and * is tried if no match is found.
+# cap string matching is only supported on L2
+# Directories default to 00755, 0, 0, 0 if not configured
+# Files default to 00644, 0, 0, 0 if not configured
+
+# dir
+cache, 00770, 1000, 2001, 0
+config, 00555, 0, 0, 0
+data, 00771, 1000, 1000, 0
+mnt, 00755, 0, 1000, 0
+sbin, 00750, 0, 2000, 0
+system/bin, 00751, 0, 2000, 0
+vendor/bin, 00751, 0, 2000, 0
+vendor, 00755, 0, 2000, 0
+
+# file
+bin/*, 00755, 0, 0, 0
+fstab.*, 00640, 0, 2000, 0
+init*, 00750, 0, 2000, 0
+sbin/*, 00750, 0, 2000, 0
+system/bin/*, 00755, 0, 2000, 0
+vendor/bin/*, 00755, 0, 2000, 0
+updater/bin/*, 00755, 0, 2000, 0
+updater/system/bin/*, 00755, 0, 2000, 0
diff --git a/dsoftbus/build/ohos/images/mkimage/mkextimage.py b/dsoftbus/build/ohos/images/mkimage/mkextimage.py
new file mode 100755
index 0000000000000000000000000000000000000000..2d1ff199de57bf8d8cdbf1958a96e25e2bd389ec
--- /dev/null
+++ b/dsoftbus/build/ohos/images/mkimage/mkextimage.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python
+# coding: utf-8
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import subprocess
+import os
+
+FS_TYPE = "ext4"
+BLOCKSIZE = 4096
+
+
+def args_parse(argv):
+ parser = argparse.ArgumentParser(description='mkextimage.py')
+
+ parser.add_argument("src_dir", help="The source file for sload.")
+ parser.add_argument("device", help="The deivce for mkfs.")
+ parser.add_argument("mount_point", help="The filesystem mountpoint.")
+ parser.add_argument("fs_size", help="The size of filesystem.")
+ parser.add_argument("--fs_type", help="The filesystem type.")
+ parser.add_argument("--dac_config",
+ help="The path of dac config to e2fsdroid.")
+ parser.add_argument("--inode_size", help="The inode size to mke2fs.")
+ parser.add_argument("--file_context",
+ help="The path of file_context to e2fsdroid.")
+ parser.add_argument("--root_dir", help="The root dir for root image.")
+ parser.add_argument("--journal_size", help="The journal_size for mke2fs.")
+ parser.add_argument("--reserve_percent",
+ help="The reserve_percent for mke2fs.")
+ parser.add_argument("--extend_opts", nargs='+',
+ help="The extend opt for mke2fs.(not support sparse)")
+
+ args = parser.parse_known_args(argv)[0]
+ return args
+
+
+def run_cmd(cmd):
+ res = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ sout, serr = res.communicate()
+
+ return res.pid, res.returncode, sout, serr
+
+
+def build_run_mke2fs(args):
+ mke2fs_opts = ""
+ mke2fs_cmd = ""
+ is_data = False
+
+ if "data" in args.mount_point:
+ is_data = True
+ if args.extend_opts:
+ mke2fs_opts += " -E " + ",".join(args.extend_opts)
+ if args.inode_size:
+ mke2fs_opts += " -I " + args.inode_size
+ else:
+ mke2fs_opts += " -I " + "256"
+ if args.journal_size:
+ mke2fs_opts += " -J size=" + args.journal_size
+ elif not is_data:
+ mke2fs_opts += " -O ^has_journal"
+ if args.reserve_percent:
+ mke2fs_opts += " -m " + args.reserve_percent
+ elif not is_data:
+ mke2fs_opts += " -m 0"
+ mke2fs_opts += " -L " + args.mount_point + " -M " + args.mount_point
+
+ blocks = int(int(args.fs_size) / BLOCKSIZE)
+ mke2fs_cmd += ("mke2fs " + str(mke2fs_opts) + " -t " + FS_TYPE + " -b "
+ + str(BLOCKSIZE) + " " + args.device + " " + str(blocks))
+ res = run_cmd(mke2fs_cmd)
+ if res[1] != 0:
+ print("info: " + mke2fs_cmd)
+ print("pid " + str(res[0]) + " ret " + str(res[1]) + "\n" +
+ res[2].decode() + res[3].decode())
+ return res[1]
+
+
+def build_run_e2fsdroid(args):
+ e2fsdroid_opts = ""
+ e2fsdroid_cmd = ""
+
+ if not args.extend_opts or not "sparse" in args.extend_opts:
+ e2fsdroid_opts += " -e"
+ if args.dac_config:
+ e2fsdroid_opts += " -C " + args.dac_config
+ if args.file_context:
+ e2fsdroid_opts += " -S " + args.file_context
+
+ e2fsdroid_cmd += ("e2fsdroid" + e2fsdroid_opts + " -f " +
+ args.src_dir + " -a " + args.mount_point +
+ " " + args.device)
+ res = run_cmd(e2fsdroid_cmd)
+ if res[1] != 0:
+ print("info: " + e2fsdroid_cmd)
+ print("pid " + str(res[0]) + " ret " + str(res[1]) + "\n" +
+ res[2].decode() + res[3].decode())
+ return res[1]
+
+
+def build(args):
+ args = args_parse(args)
+
+ res = build_run_mke2fs(args)
+ if res != 0:
+ print("error run mke2fs errno: " + str(res))
+ sys.exit(1)
+ res = build_run_e2fsdroid(args)
+ if res != 0:
+ print("error run e2fsdroid errno: " + str(res))
+ os.remove(args.device)
+ sys.exit(2)
+
+
+if __name__ == '__main__':
+ build(sys.argv[1:])
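
As a worked example, the vendor config elsewhere in this patch (268434944 bytes, mount point /vendor, no journal_size or reserve_percent) would lead build_run_mke2fs() to a command along these lines; the image file name is a made-up placeholder:

```python
# Reconstructing the mke2fs invocation for the vendor config; the image
# file name vendor.img is a made-up example.
fs_size, blocksize = 268434944, 4096
blocks = fs_size // blocksize  # 65535 whole 4 KiB blocks
cmd = ('mke2fs -I 256 -O ^has_journal -m 0 -L /vendor -M /vendor'
       ' -t ext4 -b {} vendor.img {}'.format(blocksize, blocks))
print(cmd)
```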
diff --git a/dsoftbus/build/ohos/images/mkimage/mkf2fsimage.py b/dsoftbus/build/ohos/images/mkimage/mkf2fsimage.py
new file mode 100755
index 0000000000000000000000000000000000000000..4d53453f8b7fdc8cde2be254e231638e32114f4e
--- /dev/null
+++ b/dsoftbus/build/ohos/images/mkimage/mkf2fsimage.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python
+# coding: utf-8
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import subprocess
+import os
+
+
+def args_parse(args):
+ parser = argparse.ArgumentParser(description='mkf2fsimage.py')
+
+ parser.add_argument("src_dir", help="The source file for sload.")
+ parser.add_argument("device", help="The deivce for mkfs.")
+ parser.add_argument("mount_point", help="The filesystem mountpoint.")
+ parser.add_argument("fs_size", help="The size of filesystem.")
+ parser.add_argument("--fs_type", help="The filesystem type.")
+ parser.add_argument("--sparse", action='store_true',
+ help="The sparse opt(not support).")
+ parser.add_argument("--prjquota", action='store_true',
+ help="The prjquota opt for mkf2fs.")
+ parser.add_argument("--casefold", action='store_true',
+ help="The casefold opt for mkf2fs.")
+ parser.add_argument("--dac_config",
+ help="The path of fs config to sload_f2fs.")
+ parser.add_argument("--timestamp", help="The timestamp for filesystem.")
+ parser.add_argument("--label", help="The lable for filesystem.")
+ parser.add_argument("--file_context",
+ help="The path of file_context to sload_f2fs.")
+ parser.add_argument("--root_dir", help="The root dir for root image.")
+
+ args = parser.parse_known_args(args)[0]
+ return args
+
+
+def run_cmd(cmd):
+ res = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ sout, serr = res.communicate()
+
+ return res.pid, res.returncode, sout, serr
+
+
+def build_run_mkf2fs(args):
+ mkf2fs_opts = ""
+ mkf2fs_cmd = ""
+
+ if args.sparse:
+ mkf2fs_opts += " -S " + args.fs_size
+ if args.label:
+ mkf2fs_opts += " -l " + args.label
+ else:
+ mkf2fs_opts += " -l " + args.mount_point
+ if args.prjquota:
+ mkf2fs_opts += " -O project_quota,extra_attr"
+ if args.casefold:
+ mkf2fs_opts += " -O casefold -C utf8 "
+
+ mkf2fs_cmd += ("make_f2fs -d1 -f -O encrypt -O quota " +
+ " -O verity -w 4096 -R 0:0 " + mkf2fs_opts +
+ " " + args.device)
+
+ res = run_cmd(mkf2fs_cmd)
+ if res[1] != 0:
+ print("info " + mkf2fs_cmd)
+ print("pid " + str(res[0]) + " ret " + str(res[1]) + "\n" +
+ res[2].decode() + res[3].decode())
+ return res[1]
+
+
+def build_run_sloadf2fs(args):
+ sloadf2fs_opts = ""
+ sloadf2fs_cmd = ""
+
+ if args.sparse:
+ sloadf2fs_opts += " -S"
+ if args.dac_config:
+ sloadf2fs_opts += " -C " + args.dac_config
+ sloadf2fs_opts += " -f " + args.src_dir
+ if args.file_context:
+ sloadf2fs_opts += " -s " + args.file_context
+ if args.mount_point[0] != '/':
+ args.mount_point = "/" + args.mount_point
+ sloadf2fs_opts += " -t " + args.mount_point
+ if args.timestamp:
+ sloadf2fs_opts += " -T " + args.timestamp
+
+ sloadf2fs_cmd += ("sload_f2fs " + sloadf2fs_opts + " " + args.device)
+ res = run_cmd(sloadf2fs_cmd)
+ if res[1] != 0:
+ print("info " + sloadf2fs_cmd)
+ print("pid " + str(res[0]) + " ret " + str(res[1]) + "\n" +
+ res[2].decode() + res[3].decode())
+ return res[1]
+
+
+def build(args):
+ args = args_parse(args)
+
+ if not args.sparse:
+ trunc_cmd = "truncate -s " + args.fs_size + " " + args.device
+ res = run_cmd(trunc_cmd)
+ if res[1] != 0:
+ sys.exit(1)
+ res = build_run_mkf2fs(args)
+ if res != 0:
+ print("error run mkf2fs errno: " + str(res))
+ sys.exit(2)
+ res = build_run_sloadf2fs(args)
+ if res != 0:
+ print("error run sload_f2fs errno: " + str(res))
+ os.remove(args.device)
+ sys.exit(3)
+
+
+if __name__ == '__main__':
+ build(sys.argv[1:])
diff --git a/dsoftbus/build/ohos/images/mkimage/mkimages.py b/dsoftbus/build/ohos/images/mkimage/mkimages.py
new file mode 100755
index 0000000000000000000000000000000000000000..24b09a092bf0ab16463248d4febfd6d403b5f028
--- /dev/null
+++ b/dsoftbus/build/ohos/images/mkimage/mkimages.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# coding: utf-8
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import subprocess
+import shutil
+import tempfile
+
+
+def run_cmd(cmd):
+ res = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ sout, serr = res.communicate()
+
+ return res.pid, res.returncode, sout, serr
+
+
+def build_rootdir(src_dir):
+ # Stage a temporary root: copy the prepared "root" skeleton that sits
+ # next to src_dir, then graft src_dir in as its "system" subtree.
+ tmp_dir = tempfile.mkdtemp(prefix="tmp")
+ root_dir = os.path.join(os.path.dirname(src_dir), "root")
+
+ shutil.rmtree(tmp_dir)
+ shutil.copytree(root_dir, tmp_dir, symlinks=True)
+ tmp_dir_system = os.path.join(tmp_dir, "system")
+ shutil.rmtree(tmp_dir_system, ignore_errors=True)
+ shutil.copytree(src_dir, tmp_dir_system, symlinks=True)
+ return tmp_dir
+
+
+def load_config(config_file):
+ mkfs_tools = ""
+ fs_type = ""
+ mk_configs = ""
+
+ with open(config_file, "r") as file:
+ for line in file:
+ line = line.strip()
+ if not line or line.startswith("#"):
+ continue
+ mk_configs += line + " "
+ if "ext4" in mk_configs:
+ fs_type = "ext4"
+ mkfs_tools = "mkextimage.py"
+ elif "f2fs" in mk_configs:
+ mkfs_tools = "mkf2fsimage.py"
+ fs_type = "f2fs"
+ else:
+ print("not support filesystem type!!")
+ sys.exit(1)
+ return mkfs_tools, mk_configs
+
+
+def mk_images(args):
+ if len(args) != 4:
+ print("mk_images need 4 args!!!")
+ sys.exit(1)
+
+ src_dir = args[0]
+ config_file = args[1]
+ device = args[2]
+ is_sparse = args[3]
+
+ if "system.img" in device:
+ src_dir = build_rootdir(src_dir)
+ mkfs_tools, mk_configs = load_config(config_file)
+ mk_configs = src_dir + " " + device + " " + mk_configs
+
+ res = run_cmd(mkfs_tools + " " + mk_configs)
+ if res[1]:
+ print("pid " + str(res[0]) + " ret " + str(res[1]) + "\n" +
+ res[2].decode() + res[3].decode())
+ print("MkImages failed errno: " + str(res[1]))
+ sys.exit(2)
+ # The mk tools do not support sparse images directly; convert the raw
+ # image with img2simg afterwards.
+ if "sparse" in is_sparse:
+ tmp_device = device + ".tmp"
+ res = run_cmd("img2simg " + device + " " + tmp_device)
+ if res[1]:
+ print("img2simg failed with return code " + str(res[1]))
+ sys.exit(3)
+ os.rename(tmp_device, device)
+
+
+if __name__ == '__main__':
+ mk_images(sys.argv[1:])
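
A hypothetical direct invocation of mk_images(); in the normal build flow build_image.py supplies these four arguments, and all paths below are examples only:

```python
# Example-only call; real arguments come from build_image.py.
from mkimage import mkimages

mkimages.mk_images([
    'packages/phone/system',                            # source dir
    'build/ohos/images/mkimage/system_image_conf.txt',  # parameter file
    'packages/phone/images/system.img',                 # output image
    'raw',                                              # "raw" or "sparse"
])
```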
diff --git a/dsoftbus/build/ohos/images/mkimage/system_image_conf.txt b/dsoftbus/build/ohos/images/mkimage/system_image_conf.txt
new file mode 100644
index 0000000000000000000000000000000000000000..1e2f5a05a0d7ad6a49909f9cab6bb8fc1809ed07
--- /dev/null
+++ b/dsoftbus/build/ohos/images/mkimage/system_image_conf.txt
@@ -0,0 +1,4 @@
+/
+1610612224
+--fs_type=ext4
+--dac_config ../../build/ohos/images/mkimage/dac.txt
\ No newline at end of file
diff --git a/dsoftbus/build/ohos/images/mkimage/updater_image_conf.txt b/dsoftbus/build/ohos/images/mkimage/updater_image_conf.txt
new file mode 100644
index 0000000000000000000000000000000000000000..6675ff6dc91ab7d6fc4301e2da45dd301202295b
--- /dev/null
+++ b/dsoftbus/build/ohos/images/mkimage/updater_image_conf.txt
@@ -0,0 +1,4 @@
+/updater
+20971520
+--fs_type=ext4
+--dac_config ../../build/ohos/images/mkimage/dac.txt
diff --git a/dsoftbus/build/ohos/images/mkimage/userdata_image_conf.txt b/dsoftbus/build/ohos/images/mkimage/userdata_image_conf.txt
new file mode 100644
index 0000000000000000000000000000000000000000..45add9eb16fb44c2fe76f44f54113aee918c70b5
--- /dev/null
+++ b/dsoftbus/build/ohos/images/mkimage/userdata_image_conf.txt
@@ -0,0 +1,4 @@
+/data
+1468006400
+--fs_type=ext4
+--dac_config ../../build/ohos/images/mkimage/dac.txt
\ No newline at end of file
diff --git a/dsoftbus/build/ohos/images/mkimage/vendor_image_conf.txt b/dsoftbus/build/ohos/images/mkimage/vendor_image_conf.txt
new file mode 100644
index 0000000000000000000000000000000000000000..a340af9c4bfbaee03777ae18474dc66cdb394eb8
--- /dev/null
+++ b/dsoftbus/build/ohos/images/mkimage/vendor_image_conf.txt
@@ -0,0 +1,4 @@
+/vendor
+268434944
+--fs_type=ext4
+--dac_config ../../build/ohos/images/mkimage/dac.txt
\ No newline at end of file
diff --git a/dsoftbus/build/ohos/inner_kits_adapter.json b/dsoftbus/build/ohos/inner_kits_adapter.json
new file mode 100644
index 0000000000000000000000000000000000000000..73e61fe19d844ff890cfff0ca9a623ee0d8c3ca6
--- /dev/null
+++ b/dsoftbus/build/ohos/inner_kits_adapter.json
@@ -0,0 +1 @@
+{"multimodalinput":"multimodalinput_service", "ai":"distributedprofile", "sensors":"miscdevice", "hilog":"hilog_native", "hiviewdfx_hilog_native":"hilog_native", "hitrace":"hitrace_native", "hicollie":"hicollie_native"}
diff --git a/dsoftbus/build/ohos/kernel/kernel.gni b/dsoftbus/build/ohos/kernel/kernel.gni
new file mode 100644
index 0000000000000000000000000000000000000000..d585469b4e91621c52847ec49f00b726d367aa8c
--- /dev/null
+++ b/dsoftbus/build/ohos/kernel/kernel.gni
@@ -0,0 +1,15 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+declare_args() {
+ linux_kernel_version = "linux-5.10"
+}
diff --git a/dsoftbus/build/ohos/kits/kits_check.gni b/dsoftbus/build/ohos/kits/kits_check.gni
new file mode 100755
index 0000000000000000000000000000000000000000..1441d8e50eb0b6307d84b2d7f0cdd05c1be49893
--- /dev/null
+++ b/dsoftbus/build/ohos/kits/kits_check.gni
@@ -0,0 +1,56 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+
+template("kits_check_remove") {
+ assert(defined(invoker.subsystem_name), "subsystem_name is required.")
+ assert(defined(invoker.sdk_libs_name), "sdk_libs_name is required.")
+ assert(defined(invoker.sign_file_root_dir), "sign_file_root_dir is required.")
+ assert(defined(invoker.output_file), "output_file is required.")
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps += invoker.deps
+ }
+
+ action_with_pydeps(target_name) {
+ script = "//build/ohos/kits/kits_check_remove.py"
+ deps = _deps
+
+ outputs = [ invoker.output_file ]
+
+ sdk_type = "java"
+ if (defined(invoker.sdk_type)) {
+ sdk_type = invoker.sdk_type
+ }
+
+ args = [
+ "--subsystem-name",
+ invoker.subsystem_name,
+ "--sign-file-root-dir",
+ rebase_path(invoker.sign_file_root_dir, root_build_dir),
+ "--output-file",
+ rebase_path(invoker.output_file, root_build_dir),
+ "--kit-type",
+ sdk_type,
+ ]
+
+ if (invoker.sdk_libs_name != []) {
+ args += [ "--kit-list" ]
+ foreach(sdk_lib_name, invoker.sdk_libs_name) {
+ args += [ sdk_lib_name ]
+ }
+ }
+ }
+}
diff --git a/dsoftbus/build/ohos/kits/kits_check_remove.py b/dsoftbus/build/ohos/kits/kits_check_remove.py
new file mode 100755
index 0000000000000000000000000000000000000000..17aad248e3376a745cafe4ad72a5feaf597597a9
--- /dev/null
+++ b/dsoftbus/build/ohos/kits/kits_check_remove.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import argparse
+
+sys.path.append(
+ os.path.dirname(
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
+from scripts.util.file_utils import write_json_file
+
+
+def get_allowlist():
+ return ['hilog_java']
+
+
+def get_kits_signature_list(sign_file_root_dir, subsystem_name,
+ sign_file_name):
+ kits_signature_list = []
+ subsystem_sign_file_dir = os.path.join(sign_file_root_dir, subsystem_name)
+ if not os.path.exists(subsystem_sign_file_dir):
+ return kits_signature_list
+ kits_module_list = os.listdir(subsystem_sign_file_dir)
+ for module_name in kits_module_list:
+ signature_file = os.path.join(subsystem_sign_file_dir, module_name,
+ sign_file_name)
+ if os.path.exists(signature_file):
+ kits_signature_list.append(module_name)
+ return kits_signature_list
+
+
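+# check_remove passes only when the set of kit modules that have signature
+# files exactly matches the configured kit list; any difference means a kit
+# was removed (or added) without updating the saved signatures.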
+def check_remove(sign_file_root_dir, subsystem_name, kit_list, kit_type):
+ if not os.path.exists(sign_file_root_dir):
+ return True
+
+ sign_file_name = 'signature'
+ if kit_type == 'so':
+ sign_file_name = 'check.txt'
+
+ kits_signature_list = get_kits_signature_list(sign_file_root_dir,
+ subsystem_name,
+ sign_file_name)
+
+ if len(kits_signature_list) != len(kit_list):
+ return False
+
+ for kits_signature_module in kits_signature_list:
+ if kits_signature_module not in kit_list:
+ return False
+ return True
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--subsystem-name', required=True)
+ parser.add_argument('--sign-file-root-dir', required=True)
+ parser.add_argument('--kit-list', nargs='+', default=[])
+ parser.add_argument('--output-file', required=True)
+ parser.add_argument('--kit-type', required=True)
+ parser.add_argument('--depfile', required=False)
+ args = parser.parse_args()
+
+ kit_list = args.kit_list
+ for _kit in get_allowlist():
+ if _kit in kit_list:
+ kit_list.remove(_kit)
+
+ result = check_remove(args.sign_file_root_dir, args.subsystem_name,
+ kit_list, args.kit_type)
+ if not result:
+ raise Exception(
+            "Error: kits of part '{}' were removed, please check the kit "
+            "config.".format(args.subsystem_name))
+ write_json_file(args.output_file, {})
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/kits/kits_check_remove.pydeps b/dsoftbus/build/ohos/kits/kits_check_remove.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..972b49d1fc1d10904f427443c43161fe2615d356
--- /dev/null
+++ b/dsoftbus/build/ohos/kits/kits_check_remove.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/kits --output build/ohos/kits/kits_check_remove.pydeps build/ohos/kits/kits_check_remove.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/file_utils.py
+kits_check_remove.py
diff --git a/dsoftbus/build/ohos/ndk/BUILD.gn b/dsoftbus/build/ohos/ndk/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..a2961fbf324ee51ab5c9f91f1184042b53f17b4f
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/BUILD.gn
@@ -0,0 +1,310 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ndk_build_home = "//build/ohos/ndk"
+generate_ndk_build_script = "${ndk_build_home}/scan_ndk_targets.py"
+generated_ndk_build_home = get_path_info(ndk_build_home, "gen_dir") + "/ndk"
+generated_ndk_build = "${generated_ndk_build_home}/BUILD.gn"
+
+generate_ndk_build_args = [
+ "--output",
+ rebase_path(generated_ndk_build, root_build_dir),
+ "--root-dir",
+ rebase_path("//", root_build_dir),
+]
+
+# Scan all ndk_library and ndk_header targets.
+exec_script(generate_ndk_build_script, generate_ndk_build_args)
+
+import("//build/config/python.gni")
+import("//build/ohos/ndk/ndk.gni")
+import("//build/ohos/sdk/sdk.gni")
+
+package_info_file = "$ndk_os_irrelevant_out_dir/oh-uni-package.json"
+
+package_info = {
+ path = "native"
+ displayName = "Native"
+ version = current_ndk_version
+ if (release_type != "") {
+ releaseType = release_type
+ }
+ if (meta_version != "") {
+ meta = {
+ metaVersion = meta_version
+ }
+ }
+ apiVersion = api_version
+}
+write_file(package_info_file, package_info, "json")
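+# With the settings above, the generated oh-uni-package.json looks roughly
+# like the following (field values depend on current_ndk_version, release_type
+# and api_version; shown here for illustration only):
+#
+#   {
+#     "path": "native",
+#     "displayName": "Native",
+#     "version": "<current_ndk_version>",
+#     "apiVersion": "<api_version>"
+#   }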
+
+ndk_targets = [
+ ":ndk_doxygen",
+ ":ndk_cmake_files",
+ "${generated_ndk_build_home}:all_ndk_targets",
+ ":merge_ndk_notice",
+ ":verify_ndk_notice_file",
+]
+if (ndk_system == "win") {
+ ndk_targets +=
+ [ "//prebuilts/clang/ohos/windows-x86_64:windows_x86_64_toolchains" ]
+} else if (ndk_system == "mac") {
+ ndk_targets +=
+ [ "//prebuilts/clang/ohos/darwin-x86_64:darwin_x86_64_toolchains" ]
+} else if (ndk_system == "linux") {
+ ndk_targets +=
+ [ "//prebuilts/clang/ohos/linux-x86_64:linux_x86_64_toolchains" ]
+} else if (ndk_system == "default") {
+ if (host_os == "mac") {
+ ndk_targets +=
+ [ "//prebuilts/clang/ohos/darwin-x86_64:darwin_x86_64_toolchains" ]
+ } else {
+ ndk_targets += [
+ "//prebuilts/clang/ohos/windows-x86_64:windows_x86_64_toolchains",
+ "//prebuilts/clang/ohos/linux-x86_64:linux_x86_64_toolchains",
+ ]
+ }
+}
+group("ohos_ndk") {
+ deps = ndk_targets
+ if (archive_ndk) {
+ deps += [ ":archive_ndk" ]
+ }
+}
+
+group("ndk_doxygen") {
+ deps = [
+ ":create_docs_portal_and_archive",
+ ":generate_ndk_docs",
+ ]
+}
+
+# doxygen always generates index.html
+ndk_doxygen_output = "$ndk_docs_out_dir/html"
+ndk_docs_portal = "$ndk_docs_out_dir/index.html"
+
+action_with_pydeps("generate_ndk_docs") {
+ deps = [ "${generated_ndk_build_home}:all_ndk_targets" ]
+ script = "//build/ohos/ndk/generate_ndk_docs.py"
+ depfile = "$target_gen_dir/$target_name.d"
+
+ doxygen_file = "//build/ohos/ndk/Doxyfile"
+ inputs = [ doxygen_file ]
+
+ version = current_ndk_version
+ working_dir = "$ndk_headers_out_dir"
+
+ outputs = [ ndk_doxygen_output ]
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--working-dir",
+ rebase_path(working_dir, root_build_dir),
+ "--version",
+ version,
+ "--output",
+ rebase_path(ndk_doxygen_output, root_build_dir),
+ "--doxygen-file",
+ rebase_path(doxygen_file, root_build_dir),
+ "--record-path",
+ rebase_path("$target_gen_dir/" + get_path_info(ndk_doxygen_output, "file") +
+ ".md5.stamp",
+ root_build_dir),
+ ]
+}
+
+action_with_pydeps("create_docs_portal_and_archive") {
+ deps = [ ":generate_ndk_docs" ]
+ script = "//build/ohos/ndk/create_ndk_docs_portal.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--doxygen-output",
+ rebase_path(ndk_doxygen_output + "/index.html", root_build_dir),
+ "--record-path",
+ rebase_path("$target_gen_dir/" + get_path_info(ndk_docs_portal, "file") +
+ ".md5.stamp",
+ root_build_dir),
+ "--portal-path",
+ rebase_path(ndk_docs_portal, root_build_dir),
+ ]
+ outputs = [ ndk_docs_portal ]
+}
+
+ohos_ndk_copy("ndk_cmake_files") {
+ dest_dir = "$ndk_os_irrelevant_out_dir/build"
+ sources = [ "./cmake" ]
+}
+
+action_with_pydeps("merge_ndk_notice") {
+ deps = [ "${generated_ndk_build_home}:all_ndk_targets" ]
+ script = "//build/ohos/notice/merge_notice_files.py"
+ depfile = "$target_gen_dir/$target_name.d"
+
+ outputs = [
+ ndk_notice_txt,
+ ndk_notice_gz,
+ ]
+
+ args = [
+ "--image-name",
+ "ndk",
+ "--notice-root-dir",
+ rebase_path(ndk_notice_dir, root_build_dir),
+ "--output-notice-txt",
+ rebase_path(ndk_notice_txt, root_build_dir),
+ "--output-notice-gz",
+ rebase_path(ndk_notice_gz, root_build_dir),
+ "--notice-title",
+ "Notices for files and software contained in sdk-native in this directory:",
+ "--static-library-notice-dir",
+ rebase_path(static_libraries_notice_dir, root_build_dir),
+ "--target-cpu",
+ target_cpu,
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+}
+
+action("verify_ndk_notice_file") {
+ deps = [ ":merge_ndk_notice" ]
+
+ script = "//build/core/build_scripts/verify_notice.sh"
+ _verify_result = "${target_out_dir}/ndk_notice_verify_result.out"
+
+ outputs = [ _verify_result ]
+
+ args = [
+ rebase_path(ndk_notice_txt, root_build_dir),
+ rebase_path(_verify_result, root_build_dir),
+ rebase_path(target_out_dir, root_build_dir),
+ ]
+}
+
+group("archive_ndk") {
+ deps = []
+ if (ndk_system == "default") {
+ if (host_os == "mac") {
+ deps += [ ":archive_darwin_ndk" ]
+ } else {
+ deps += [
+ ":archive_linux_ndk",
+ ":archive_windows_ndk",
+ ]
+ }
+ } else if (ndk_system == "win") {
+ deps += [ ":archive_windows_ndk" ]
+ } else if (ndk_system == "mac") {
+ deps += [ ":archive_darwin_ndk" ]
+ } else if (ndk_system == "linux") {
+ deps += [ ":archive_linux_ndk" ]
+ }
+}
+
+action_with_pydeps("archive_windows_ndk") {
+ deps = ndk_targets
+ script = "//build/ohos/ndk/archive_ndk.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ _output = "$ohos_sdk_out_dir/${windows_system}/${ndk_zip_prefix}-${windows_system}-${current_ndk_version}"
+ if (release_type != "") {
+ _output += "-${release_type}.zip"
+ } else {
+ _output += ".zip"
+ }
+
+ args = [
+ "--os-irrelevant-dir",
+ rebase_path(ndk_os_irrelevant_out_dir, root_build_dir),
+ "--output",
+ rebase_path(_output, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--os-specific-dir",
+ rebase_path("$ndk_windows_specific_out_dir", root_build_dir),
+ "--notice-file",
+ rebase_path(ndk_notice_txt, root_build_dir),
+ "--prefix",
+ ndk_zip_prefix,
+ "--record-path",
+ rebase_path(
+ "$target_gen_dir/" + get_path_info(_output, "file") + ".md5.stamp",
+ root_build_dir),
+ ]
+ outputs = [ _output ]
+}
+
+action_with_pydeps("archive_linux_ndk") {
+ deps = ndk_targets
+ script = "//build/ohos/ndk/archive_ndk.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ _output = "$ohos_sdk_out_dir/${linux_system}/${ndk_zip_prefix}-${linux_system}-${current_ndk_version}"
+ if (release_type != "") {
+ _output += "-${release_type}.zip"
+ } else {
+ _output += ".zip"
+ }
+
+ args = [
+ "--os-irrelevant-dir",
+ rebase_path(ndk_os_irrelevant_out_dir, root_build_dir),
+ "--output",
+ rebase_path(_output, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--notice-file",
+ rebase_path(ndk_notice_txt, root_build_dir),
+ "--os-specific-dir",
+ rebase_path("$ndk_linux_specific_out_dir", root_build_dir),
+ "--prefix",
+ ndk_zip_prefix,
+ "--record-path",
+ rebase_path(
+ "$target_gen_dir/" + get_path_info(_output, "file") + ".md5.stamp",
+ root_build_dir),
+ ]
+ outputs = [ _output ]
+}
+
+action_with_pydeps("archive_darwin_ndk") {
+ deps = ndk_targets
+ script = "//build/ohos/ndk/archive_ndk.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ _output = "$ohos_sdk_out_dir/${darwin_system}/${ndk_zip_prefix}-${darwin_system}-${current_ndk_version}"
+ if (release_type != "") {
+ _output += "-${release_type}.zip"
+ } else {
+ _output += ".zip"
+ }
+
+ args = [
+ "--os-irrelevant-dir",
+ rebase_path(ndk_os_irrelevant_out_dir, root_build_dir),
+ "--output",
+ rebase_path(_output, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--notice-file",
+ rebase_path(ndk_notice_txt, root_build_dir),
+ "--os-specific-dir",
+ rebase_path("$ndk_darwin_specific_out_dir", root_build_dir),
+ "--prefix",
+ ndk_zip_prefix,
+ "--record-path",
+ rebase_path(
+ "$target_gen_dir/" + get_path_info(_output, "file") + ".md5.stamp",
+ root_build_dir),
+ ]
+ outputs = [ _output ]
+}
diff --git a/dsoftbus/build/ohos/ndk/Doxyfile b/dsoftbus/build/ohos/ndk/Doxyfile
new file mode 100644
index 0000000000000000000000000000000000000000..c9aeab53e8037308ffc95760e6dfceb796d82f7d
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/Doxyfile
@@ -0,0 +1,287 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Doxyfile 1.8.6
+
+# This file describes the settings to be used by the documentation system
+# doxygen for a project.
+#
+# All text after a double hash (##) is considered a comment and is placed in
+# front of the TAG it is preceding.
+#
+# All text after a single hash (#) is considered a comment and will be ignored.
+# The format is:
+# TAG = value [value, ...]
+# For lists, items can also be appended using:
+# TAG += value [value, ...]
+# Values that contain spaces should be placed between quotes (\" \").
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+DOXYFILE_ENCODING = UTF-8
+PROJECT_NAME = "API Reference"
+PROJECT_NUMBER = %VERSION%
+PROJECT_BRIEF =
+PROJECT_LOGO =
+OUTPUT_DIRECTORY =
+CREATE_SUBDIRS = NO
+OUTPUT_LANGUAGE = English
+BRIEF_MEMBER_DESC = YES
+REPEAT_BRIEF = YES
+ABBREVIATE_BRIEF =
+ALWAYS_DETAILED_SEC = NO
+INLINE_INHERITED_MEMB = NO
+FULL_PATH_NAMES = YES
+STRIP_FROM_PATH =
+STRIP_FROM_INC_PATH =
+SHORT_NAMES = NO
+JAVADOC_AUTOBRIEF = NO
+QT_AUTOBRIEF = NO
+MULTILINE_CPP_IS_BRIEF = NO
+INHERIT_DOCS = YES
+SEPARATE_MEMBER_PAGES = NO
+TAB_SIZE = 4
+ALIASES =
+TCL_SUBST =
+OPTIMIZE_OUTPUT_FOR_C = YES
+OPTIMIZE_OUTPUT_JAVA = NO
+OPTIMIZE_FOR_FORTRAN = NO
+OPTIMIZE_OUTPUT_VHDL = NO
+EXTENSION_MAPPING =
+MARKDOWN_SUPPORT = YES
+AUTOLINK_SUPPORT = YES
+BUILTIN_STL_SUPPORT = NO
+CPP_CLI_SUPPORT = NO
+SIP_SUPPORT = NO
+IDL_PROPERTY_SUPPORT = YES
+DISTRIBUTE_GROUP_DOC = NO
+SUBGROUPING = YES
+INLINE_GROUPED_CLASSES = NO
+INLINE_SIMPLE_STRUCTS = NO
+TYPEDEF_HIDES_STRUCT = NO
+LOOKUP_CACHE_SIZE = 0
+EXTRACT_ALL = NO
+EXTRACT_PRIVATE = NO
+EXTRACT_PACKAGE = NO
+EXTRACT_STATIC = NO
+EXTRACT_LOCAL_CLASSES = NO
+EXTRACT_LOCAL_METHODS = NO
+EXTRACT_ANON_NSPACES = NO
+HIDE_UNDOC_MEMBERS = YES
+HIDE_UNDOC_CLASSES = NO
+HIDE_FRIEND_COMPOUNDS = NO
+HIDE_IN_BODY_DOCS = NO
+INTERNAL_DOCS = YES
+CASE_SENSE_NAMES = YES
+HIDE_SCOPE_NAMES = NO
+SHOW_INCLUDE_FILES = YES
+SHOW_GROUPED_MEMB_INC = NO
+FORCE_LOCAL_INCLUDES = NO
+INLINE_INFO = YES
+SORT_MEMBER_DOCS = YES
+SORT_BRIEF_DOCS = NO
+SORT_MEMBERS_CTORS_1ST = NO
+SORT_GROUP_NAMES = NO
+SORT_BY_SCOPE_NAME = NO
+STRICT_PROTO_MATCHING = NO
+GENERATE_TODOLIST = YES
+GENERATE_TESTLIST = YES
+GENERATE_BUGLIST = YES
+GENERATE_DEPRECATEDLIST= YES
+ENABLED_SECTIONS =
+MAX_INITIALIZER_LINES = 30
+SHOW_USED_FILES = YES
+SHOW_FILES = YES
+SHOW_NAMESPACES = YES
+FILE_VERSION_FILTER =
+LAYOUT_FILE =
+CITE_BIB_FILES =
+QUIET = NO
+WARNINGS = YES
+WARN_IF_UNDOCUMENTED = YES
+WARN_IF_DOC_ERROR = YES
+WARN_NO_PARAMDOC = NO
+WARN_FORMAT = "$file:$line: $text"
+WARN_LOGFILE =
+INPUT =
+INPUT_ENCODING = UTF-8
+FILE_PATTERNS = *.h
+RECURSIVE = YES
+EXCLUDE =
+EXCLUDE_SYMLINKS = NO
+EXCLUDE_PATTERNS =
+EXCLUDE_SYMBOLS =
+EXAMPLE_PATH =
+EXAMPLE_PATTERNS =
+EXAMPLE_RECURSIVE = NO
+IMAGE_PATH =
+INPUT_FILTER =
+FILTER_PATTERNS =
+FILTER_SOURCE_FILES = NO
+FILTER_SOURCE_PATTERNS =
+USE_MDFILE_AS_MAINPAGE =
+SOURCE_BROWSER = YES
+INLINE_SOURCES = NO
+STRIP_CODE_COMMENTS = YES
+REFERENCED_BY_RELATION = NO
+REFERENCES_RELATION = NO
+REFERENCES_LINK_SOURCE = YES
+SOURCE_TOOLTIPS = YES
+USE_HTAGS = NO
+VERBATIM_HEADERS = YES
+ALPHABETICAL_INDEX = YES
+COLS_IN_ALPHA_INDEX = 5
+IGNORE_PREFIX =
+GENERATE_HTML = YES
+HTML_OUTPUT = %OUTPUT_DIR%
+HTML_FILE_EXTENSION = .html
+HTML_HEADER =
+HTML_FOOTER =
+HTML_STYLESHEET =
+HTML_EXTRA_STYLESHEET =
+HTML_EXTRA_FILES =
+HTML_COLORSTYLE_HUE = 220
+HTML_COLORSTYLE_SAT = 100
+HTML_COLORSTYLE_GAMMA = 80
+HTML_TIMESTAMP = NO
+HTML_DYNAMIC_SECTIONS = NO
+HTML_INDEX_NUM_ENTRIES = 100
+GENERATE_DOCSET = NO
+DOCSET_FEEDNAME = "Doxygen generated docs"
+DOCSET_BUNDLE_ID = org.doxygen.Project
+DOCSET_PUBLISHER_ID = org.doxygen.Publisher
+DOCSET_PUBLISHER_NAME = Publisher
+GENERATE_HTMLHELP = NO
+CHM_FILE =
+HHC_LOCATION =
+GENERATE_CHI = NO
+CHM_INDEX_ENCODING =
+BINARY_TOC = NO
+TOC_EXPAND = NO
+GENERATE_QHP = NO
+QCH_FILE =
+QHP_NAMESPACE =
+QHP_VIRTUAL_FOLDER = doc
+QHP_CUST_FILTER_NAME =
+QHP_CUST_FILTER_ATTRS =
+QHP_SECT_FILTER_ATTRS =
+QHG_LOCATION =
+GENERATE_ECLIPSEHELP = NO
+ECLIPSE_DOC_ID = org.doxygen.Project
+DISABLE_INDEX = NO
+GENERATE_TREEVIEW = YES
+ENUM_VALUES_PER_LINE = 4
+TREEVIEW_WIDTH = 250
+EXT_LINKS_IN_WINDOW = NO
+FORMULA_FONTSIZE = 10
+FORMULA_TRANSPARENT = YES
+USE_MATHJAX = NO
+MATHJAX_FORMAT = HTML-CSS
+MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
+MATHJAX_EXTENSIONS =
+MATHJAX_CODEFILE =
+SEARCHENGINE = YES
+SERVER_BASED_SEARCH = NO
+EXTERNAL_SEARCH = NO
+SEARCHENGINE_URL =
+SEARCHDATA_FILE = searchdata.xml
+EXTERNAL_SEARCH_ID =
+EXTRA_SEARCH_MAPPINGS =
+GENERATE_LATEX = NO
+LATEX_OUTPUT =
+LATEX_CMD_NAME = latex
+MAKEINDEX_CMD_NAME = makeindex
+COMPACT_LATEX = NO
+PAPER_TYPE = a4
+EXTRA_PACKAGES =
+LATEX_HEADER =
+LATEX_FOOTER =
+LATEX_EXTRA_FILES =
+PDF_HYPERLINKS = YES
+USE_PDFLATEX = YES
+LATEX_BATCHMODE = NO
+LATEX_HIDE_INDICES = NO
+LATEX_SOURCE_CODE = NO
+LATEX_BIB_STYLE = plain
+GENERATE_RTF = NO
+RTF_OUTPUT = rtf
+COMPACT_RTF = NO
+RTF_HYPERLINKS = NO
+RTF_STYLESHEET_FILE =
+RTF_EXTENSIONS_FILE =
+GENERATE_MAN = NO
+MAN_OUTPUT = man
+MAN_EXTENSION = .3
+MAN_LINKS = NO
+GENERATE_XML = NO
+XML_OUTPUT = xml
+XML_SCHEMA =
+XML_DTD =
+XML_PROGRAMLISTING = YES
+GENERATE_DOCBOOK = NO
+DOCBOOK_OUTPUT = docbook
+GENERATE_AUTOGEN_DEF = NO
+GENERATE_PERLMOD = NO
+PERLMOD_LATEX = NO
+PERLMOD_PRETTY = YES
+PERLMOD_MAKEVAR_PREFIX =
+ENABLE_PREPROCESSING = YES
+MACRO_EXPANSION = NO
+EXPAND_ONLY_PREDEF = NO
+SEARCH_INCLUDES = YES
+INCLUDE_PATH =
+INCLUDE_FILE_PATTERNS =
+PREDEFINED =
+EXPAND_AS_DEFINED =
+SKIP_FUNCTION_MACROS = YES
+TAGFILES =
+GENERATE_TAGFILE =
+ALLEXTERNALS = NO
+EXTERNAL_GROUPS = YES
+EXTERNAL_PAGES = YES
+PERL_PATH = /usr/bin/perl
+CLASS_DIAGRAMS = YES
+MSCGEN_PATH =
+DIA_PATH =
+HIDE_UNDOC_RELATIONS = YES
+HAVE_DOT = NO
+DOT_NUM_THREADS = 0
+DOT_FONTNAME = Helvetica
+DOT_FONTSIZE = 10
+DOT_FONTPATH =
+CLASS_GRAPH = YES
+COLLABORATION_GRAPH = YES
+GROUP_GRAPHS = YES
+UML_LOOK = NO
+UML_LIMIT_NUM_FIELDS = 10
+TEMPLATE_RELATIONS = NO
+INCLUDE_GRAPH = YES
+INCLUDED_BY_GRAPH = YES
+CALL_GRAPH = NO
+CALLER_GRAPH = NO
+GRAPHICAL_HIERARCHY = YES
+DIRECTORY_GRAPH = YES
+DOT_IMAGE_FORMAT = png
+INTERACTIVE_SVG = NO
+DOT_PATH =
+DOTFILE_DIRS =
+MSCFILE_DIRS =
+DIAFILE_DIRS =
+DOT_GRAPH_MAX_NODES = 50
+MAX_DOT_GRAPH_DEPTH = 0
+DOT_TRANSPARENT = NO
+DOT_MULTI_TARGETS = YES
+GENERATE_LEGEND = YES
+DOT_CLEANUP = YES
diff --git a/dsoftbus/build/ohos/ndk/archive_ndk.py b/dsoftbus/build/ohos/ndk/archive_ndk.py
new file mode 100755
index 0000000000000000000000000000000000000000..c198314254036559bc40906b82bd6eec9b2cf13a
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/archive_ndk.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import optparse
+import os
+import sys
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+from scripts.util import build_utils # noqa: E402
+
+
+def parse_args(args):
+ args = build_utils.expand_file_args(args)
+
+ parser = optparse.OptionParser()
+ build_utils.add_depfile_option(parser)
+ parser.add_option('--output', help='generated ndk stub file')
+ parser.add_option('--os-irrelevant-dir',
+ help='base directory of ndk common files')
+ parser.add_option('--os-specific-dir',
+ help='base directory of os specific stuff')
+ parser.add_option('--prefix',
+ help='prefix string of directory in archive zipfile')
+ parser.add_option('--notice-file', help='path to notice file')
+ parser.add_option('--record-path', help='path to md5.stamp file')
+
+ options, _ = parser.parse_args(args)
+ return options
+
+
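+# Append every file under `directory` to the zip at `output`, placing entries
+# under `prefix` and writing them hermetically (e.g. fixed timestamps) so the
+# archive is reproducible; compress_fn decides per file whether to compress.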
+def do_archive(output, directory, prefix, compress_fn):
+ files = []
+ for root, _, filenames in os.walk(directory):
+ for f in filenames:
+ files.extend([os.path.join(root, f)])
+ with zipfile.ZipFile(output, 'a') as outfile:
+ for f in files:
+ compress = compress_fn(f) if compress_fn else None
+ if prefix:
+ zip_path = os.path.join(prefix, os.path.relpath(f, directory))
+ else:
+ zip_path = os.path.relpath(f, directory)
+ build_utils.add_to_zip_hermetic(outfile,
+ zip_path,
+ src_path=f,
+ compress=compress)
+
+
+def archive_ndk(output, os_irrelevant_dir, os_specific_dir, prefix,
+ compress_fn, notice):
+ # Create an empty zipfile first, then add stuff to it.
+ with zipfile.ZipFile(output, 'w') as outfile:
+ pass
+ for directory in [os_irrelevant_dir, os_specific_dir]:
+ do_archive(output, directory, prefix, compress_fn)
+
+ with zipfile.ZipFile(output, 'a') as zip_file:
+ compress = compress_fn(notice) if compress_fn else None
+ if prefix:
+ zip_path = os.path.join(prefix, os.path.basename(notice))
+ else:
+ zip_path = os.path.basename(notice)
+ build_utils.add_to_zip_hermetic(zip_file,
+ zip_path,
+ src_path=notice,
+ compress=compress)
+
+
+def main(args):
+ options = parse_args(args)
+
+ os_irrelevant_dir = options.os_irrelevant_dir
+ os_specific_dir = options.os_specific_dir
+ depfile_deps = set(
+ build_utils.get_all_files(os_irrelevant_dir) +
+ build_utils.get_all_files(os_specific_dir))
+ depfile_deps.add(options.notice_file)
+
+ build_utils.call_and_write_depfile_if_stale(lambda: archive_ndk(
+ options.output, os_irrelevant_dir, os_specific_dir, options.prefix,
+ lambda _: True, options.notice_file),
+ options,
+ depfile_deps=depfile_deps,
+ input_paths=depfile_deps,
+ output_paths=([options.output]),
+ record_path=options.record_path,
+ force=False,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/ohos/ndk/archive_ndk.pydeps b/dsoftbus/build/ohos/ndk/archive_ndk.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..0a178b536fb0c4a4ee0584fe15353336f85ba545
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/archive_ndk.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/ndk --output build/ohos/ndk/archive_ndk.pydeps build/ohos/ndk/archive_ndk.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+archive_ndk.py
diff --git a/dsoftbus/build/ohos/ndk/check_ndk_header_signature.py b/dsoftbus/build/ohos/ndk/check_ndk_header_signature.py
new file mode 100755
index 0000000000000000000000000000000000000000..351fb60497eb31a3524f07486d31a13d0a264949
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/check_ndk_header_signature.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import optparse
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+from scripts.util import build_utils # noqa: E402
+from scripts.interface_mgr import InterfaceMgr
+
+
+def parse_args(args):
+ args = build_utils.expand_file_args(args)
+
+ parser = optparse.OptionParser()
+ build_utils.add_depfile_option(parser)
+ parser.add_option('--output', help='generated ndk stub file')
+ parser.add_option('--headers',
+ action='append',
+ help='base directory of ndk common files')
+ parser.add_option('--generated-signature',
+ help='base directory of os specific stuff')
+ parser.add_option('--saved-signature',
+ help='prefix string of directory in archive zipfile')
+ parser.add_option('--check-signature',
+ action='store_true',
+ help='check ndk signature')
+ parser.add_option(
+ '--root-build-dir',
+ help='root build directory, used to strip relative address')
+
+ options, _ = parser.parse_args(args)
+
+ return options
+
+
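+# Each signature line pairs a workspace-relative header path with its sha256,
+# so comparing the generated file against the saved one detects any change to
+# the published NDK headers.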
+def header_signature(output, headers, root_build_dir, saved, generated, check):
+ signature = []
+ mgr = InterfaceMgr()
+
+ for f in headers:
+ signature.append('//{} {}'.format(os.path.relpath(f, root_build_dir),
+ mgr.get_file_sha256(f)))
+
+ os.makedirs(os.path.dirname(generated), exist_ok=True)
+ with open(generated, 'w') as g:
+ g.write('\n'.join(sorted(signature)))
+
+ if check and mgr.get_file_sha256(generated) != mgr.get_file_sha256(saved):
+ raise Exception(
+            "Error: NDK header signature changed. Generated signature file {} differs from saved signature file {}"
+ .format(generated, saved))
+
+ build_utils.touch(output)
+
+
+def main(args):
+ options = parse_args(args)
+
+ depfile_deps = set()
+ if options.check_signature:
+ depfile_deps.add(options.saved_signature)
+
+ build_utils.call_and_write_depfile_if_stale(lambda: header_signature(
+ options.output, options.headers, options.root_build_dir, options.
+ saved_signature, options.generated_signature, options.check_signature),
+ options,
+ depfile_deps=depfile_deps,
+ input_paths=depfile_deps,
+ output_paths=([options.output]),
+ input_strings=args,
+ force=False,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/ohos/ndk/check_ndk_header_signature.pydeps b/dsoftbus/build/ohos/ndk/check_ndk_header_signature.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..236a58375af0568af41111a94bb5d708eea210fe
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/check_ndk_header_signature.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/ndk --output build/ohos/ndk/check_ndk_header_signature.pydeps build/ohos/ndk/check_ndk_header_signature.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/interface_mgr.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+check_ndk_header_signature.py
diff --git a/dsoftbus/build/ohos/ndk/cmake/ohos.toolchain.cmake b/dsoftbus/build/ohos/ndk/cmake/ohos.toolchain.cmake
new file mode 100755
index 0000000000000000000000000000000000000000..12297170821a775e5abd81d80d72b1c8dc72753c
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/cmake/ohos.toolchain.cmake
@@ -0,0 +1,240 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Description: This toolchain file is supplied to cmake and specifies the
+#              locations of compilers and toolchain utilities, together with
+#              other target platform and compiler related information.
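+#
+# A typical invocation (illustrative; paths depend on where the SDK is
+# unpacked):
+#   cmake -DCMAKE_TOOLCHAIN_FILE=${OHOS_SDK_NATIVE}/build/cmake/ohos.toolchain.cmake \
+#         -DOHOS_ARCH=arm64-v8a -DOHOS_STL=c++_shared ..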
+
+cmake_minimum_required(VERSION 3.6.0)
+set(CMAKE_SYSTEM_VERSION 1)
+
+if(DEFINED OHOS_SDK_NATIVE_TOOLCHAIN_DEFINED)
+ return()
+endif()
+set(OHOS_SDK_NATIVE_TOOLCHAIN_DEFINED true)
+
+# Set OHOS_SDK_NATIVE
+get_filename_component(OHOS_SDK_NATIVE "${CMAKE_CURRENT_LIST_DIR}/../.." ABSOLUTE)
+file(TO_CMAKE_PATH "${OHOS_SDK_NATIVE}" OHOS_SDK_NATIVE)
+
+# Sdk native version
+file(STRINGS "${OHOS_SDK_NATIVE}/oh-uni-package.json" NATIVE_VER REGEX "\"version\":.*")
+string(REGEX REPLACE "\"version\":(.*)$" "\\1" SDK_NATIVE_VERSION "${NATIVE_VER}")
+string(STRIP "${SDK_NATIVE_VERSION}" SDK_NATIVE_VERSION)
+
+# Common default settings
+set(OHOS TRUE)
+set(CMAKE_SYSTEM_NAME OHOS)
+
+if(NOT DEFINED OHOS_PLATFORM_LEVEL)
+ set(OHOS_PLATFORM_LEVEL 1)
+endif()
+
+if(NOT DEFINED OHOS_TOOLCHAIN)
+ set(OHOS_TOOLCHAIN clang)
+endif()
+
+if(NOT DEFINED OHOS_STL)
+ set(OHOS_STL c++_shared)
+endif()
+
+if(NOT DEFINED OHOS_PIE)
+ set(OHOS_PIE TRUE)
+endif()
+
+if(NOT DEFINED OHOS_ARM_NEON)
+ set(OHOS_ARM_NEON thumb)
+endif()
+
+# set the ABI
+if(NOT DEFINED OHOS_ARCH)
+ set(OHOS_ARCH arm64-v8a)
+endif()
+
+# set the undefined symbols
+if(DEFINED OHOS_NO_UNDEFINED)
+ if(NOT DEFINED OHOS_ALLOW_UNDEFINED_SYMBOLS)
+ set(OHOS_ALLOW_UNDEFINED_SYMBOLS "${OHOS_NO_UNDEFINED}")
+ endif()
+endif()
+
+# set the ccache
+if(DEFINED SDK_NATIVE_CCACHE AND NOT DEFINED OHOS_CCACHE)
+ set(OHOS_CCACHE "${SDK_NATIVE_CCACHE}")
+endif()
+
+# set the sdk native platform
+include(${CMAKE_CURRENT_LIST_DIR}/sdk_native_platforms.cmake)
+if(NOT DEFINED OHOS_SDK_NATIVE_PLATFORM)
+ set(OHOS_SDK_NATIVE_PLATFORM "ohos-${SDK_NATIVE_MIN_PLATFORM_LEVEL}")
+endif()
+
+# set the sdk native platform level
+string(REPLACE "ohos-" "" OHOS_SDK_NATIVE_PLATFORM_LEVEL ${OHOS_SDK_NATIVE_PLATFORM})
+
+# set find executable programs on the host system path
+set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
+set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
+list(APPEND CMAKE_FIND_ROOT_PATH "${OHOS_SDK_NATIVE}")
+
+# set the arch abi
+set(CMAKE_OHOS_ARCH_ABI ${OHOS_ARCH})
+
+# set arch diff property ...
+if(OHOS_ARCH STREQUAL arm64-v8a)
+ set(OHOS_TOOLCHAIN_NAME aarch64-linux-ohos)
+ set(OHOS_LLVM ${OHOS_TOOLCHAIN_NAME})
+ set(CMAKE_SYSTEM_PROCESSOR aarch64)
+elseif(OHOS_ARCH STREQUAL armeabi-v7a)
+ set(OHOS_TOOLCHAIN_NAME arm-linux-ohos)
+ set(OHOS_LLVM ${OHOS_TOOLCHAIN_NAME})
+ set(CMAKE_SYSTEM_PROCESSOR arm)
+elseif(OHOS_ARCH STREQUAL x86_64)
+ set(OHOS_TOOLCHAIN_NAME x86_64-linux-ohos)
+ set(OHOS_LLVM ${OHOS_TOOLCHAIN_NAME})
+ set(CMAKE_SYSTEM_PROCESSOR x86_64)
+else()
+  message(FATAL_ERROR "Unrecognized OHOS_ARCH: ${OHOS_ARCH}")
+endif()
+
+set(CMAKE_C_COMPILER_TARGET ${OHOS_LLVM})
+set(CMAKE_CXX_COMPILER_TARGET ${OHOS_LLVM})
+
+# Export configurable variables for the try_compile() command.
+set(CMAKE_TRY_COMPILE_PLATFORM_VARIABLES
+ OHOS_TOOLCHAIN
+ OHOS_ARCH
+ OHOS_PLATFORM)
+
+# Set the common c flags
+set(OHOS_C_COMPILER_FLAGS)
+list(APPEND OHOS_C_COMPILER_FLAGS
+ -g
+ -fdata-sections
+ -ffunction-sections
+ -funwind-tables
+ -fstack-protector-strong
+ -no-canonical-prefixes
+ -fno-addrsig
+ -Wa,--noexecstack)
+if(OHOS_DISABLE_FORMAT_STRING_CHECKS)
+ list(APPEND OHOS_C_COMPILER_FLAGS -Wno-error=format-security)
+else()
+ list(APPEND OHOS_C_COMPILER_FLAGS -Wformat -Werror=format-security)
+endif()
+string(REPLACE ";" " " OHOS_C_COMPILER_FLAGS "${OHOS_C_COMPILER_FLAGS}")
+
+# set the common c++ flags
+set(OHOS_CXX_COMPILER_FLAGS)
+
+# set the debug variant flags
+set(OHOS_DEBUG_COMPILER_FLAGS)
+list(APPEND OHOS_DEBUG_COMPILER_FLAGS -O0 -fno-limit-debug-info)
+string(REPLACE ";" " " OHOS_DEBUG_COMPILER_FLAGS "${OHOS_DEBUG_COMPILER_FLAGS}")
+
+# set the release variant flags
+set(OHOS_RELEASE_COMPILER_FLAGS)
+list(APPEND OHOS_RELEASE_COMPILER_FLAGS -O2)
+list(APPEND OHOS_RELEASE_COMPILER_FLAGS -DNDEBUG)
+string(REPLACE ";" " " OHOS_RELEASE_COMPILER_FLAGS "${OHOS_RELEASE_COMPILER_FLAGS}")
+
+# set the common link flags
+set(OHOS_COMMON_LINKER_FLAGS)
+list(APPEND OHOS_COMMON_LINKER_FLAGS --rtlib=compiler-rt)
+list(APPEND OHOS_COMMON_LINKER_FLAGS -fuse-ld=lld)
+
+if(OHOS_STL STREQUAL c++_static)
+ list(APPEND OHOS_COMMON_LINKER_FLAGS "-static-libstdc++")
+elseif(OHOS_STL STREQUAL none)
+ list(APPEND OHOS_CXX_COMPILER_FLAGS "-nostdinc++")
+ list(APPEND OHOS_COMMON_LINKER_FLAGS "-nostdlib++")
+elseif(OHOS_STL STREQUAL c++_shared)
+else()
+ message(FATAL_ERROR "Unsupported STL configuration: ${OHOS_STL}.")
+endif()
+
+list(APPEND OHOS_COMMON_LINKER_FLAGS
+ -Wl,--build-id=sha1
+ -Wl,--warn-shared-textrel
+ -Wl,--fatal-warnings
+ -lunwind)
+if(NOT OHOS_ALLOW_UNDEFINED_SYMBOLS)
+ list(APPEND OHOS_COMMON_LINKER_FLAGS -Wl,--no-undefined)
+endif()
+list(APPEND OHOS_COMMON_LINKER_FLAGS -Qunused-arguments -Wl,-z,noexecstack)
+string(REPLACE ";" " " OHOS_COMMON_LINKER_FLAGS "${OHOS_COMMON_LINKER_FLAGS}")
+
+# set the executable link flags
+set(OHOS_EXE_LINKER_FLAGS)
+list(APPEND OHOS_EXE_LINKER_FLAGS -Wl,--gc-sections)
+string(REPLACE ";" " " OHOS_EXE_LINKER_FLAGS "${OHOS_EXE_LINKER_FLAGS}")
+
+# set the other flags
+set(CMAKE_C_STANDARD_LIBRARIES_INIT "-lm")
+set(CMAKE_CXX_STANDARD_LIBRARIES_INIT "-lm")
+set(CMAKE_POSITION_INDEPENDENT_CODE TRUE)
+
+# set the cmake global cflags
+set(CMAKE_C_FLAGS "" CACHE STRING "Flags for all build types.")
+set(CMAKE_C_FLAGS "${OHOS_C_COMPILER_FLAGS} ${CMAKE_C_FLAGS}")
+
+set(CMAKE_C_FLAGS_DEBUG "" CACHE STRING "Flags for debug variant builds.")
+set(CMAKE_C_FLAGS_DEBUG "${OHOS_DEBUG_COMPILER_FLAGS} ${CMAKE_C_FLAGS_DEBUG}")
+
+set(CMAKE_C_FLAGS_RELEASE "" CACHE STRING "Flags for release variant builds.")
+set(CMAKE_C_FLAGS_RELEASE "${OHOS_RELEASE_COMPILER_FLAGS} ${CMAKE_C_FLAGS_RELEASE}")
+
+# set the cmake global cppflags
+set(CMAKE_CXX_FLAGS "" CACHE STRING "Flags for all build types.")
+set(CMAKE_CXX_FLAGS "${OHOS_C_COMPILER_FLAGS} ${OHOS_CXX_COMPILER_FLAGS} ${CMAKE_CXX_FLAGS}")
+
+set(CMAKE_CXX_FLAGS_DEBUG "" CACHE STRING "Flags for debug variant builds.")
+set(CMAKE_CXX_FLAGS_DEBUG "${OHOS_DEBUG_COMPILER_FLAGS} ${CMAKE_CXX_FLAGS_DEBUG}")
+
+set(CMAKE_CXX_FLAGS_RELEASE "" CACHE STRING "Flags for release variant builds.")
+set(CMAKE_CXX_FLAGS_RELEASE "${OHOS_RELEASE_COMPILER_FLAGS} ${CMAKE_CXX_FLAGS_RELEASE}")
+
+# set the link flags
+set(CMAKE_SHARED_LINKER_FLAGS "" CACHE STRING "Linker flags to be used to create shared libraries.")
+set(CMAKE_SHARED_LINKER_FLAGS "${OHOS_COMMON_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS}")
+
+set(CMAKE_MODULE_LINKER_FLAGS "" CACHE STRING "Linker flags to be used to create modules.")
+set(CMAKE_MODULE_LINKER_FLAGS "${OHOS_COMMON_LINKER_FLAGS} ${CMAKE_MODULE_LINKER_FLAGS}")
+
+set(CMAKE_EXE_LINKER_FLAGS "" CACHE STRING "Linker flags to be used to create executables.")
+set(CMAKE_EXE_LINKER_FLAGS "${OHOS_COMMON_LINKER_FLAGS} ${OHOS_EXE_LINKER_FLAGS} ${CMAKE_EXE_LINKER_FLAGS}")
+
+# set the executable suffix
+set(HOST_SYSTEM_EXE_SUFFIX)
+if(CMAKE_HOST_SYSTEM_NAME STREQUAL Windows)
+ set(HOST_SYSTEM_EXE_SUFFIX .exe)
+endif()
+
+# set the toolchain config.
+set(TOOLCHAIN_ROOT_PATH "${OHOS_SDK_NATIVE}/llvm")
+set(TOOLCHAIN_BIN_PATH "${OHOS_SDK_NATIVE}/llvm/bin")
+
+set(CMAKE_SYSROOT "${OHOS_SDK_NATIVE}/sysroot")
+set(CMAKE_LIBRARY_ARCHITECTURE "${OHOS_TOOLCHAIN_NAME}")
+list(APPEND CMAKE_SYSTEM_LIBRARY_PATH "/usr/lib/${OHOS_TOOLCHAIN_NAME}")
+set(CMAKE_C_COMPILER_EXTERNAL_TOOLCHAIN "${TOOLCHAIN_ROOT_PATH}")
+set(CMAKE_CXX_COMPILER_EXTERNAL_TOOLCHAIN "${TOOLCHAIN_ROOT_PATH}")
+set(CMAKE_C_COMPILER "${TOOLCHAIN_BIN_PATH}/clang${HOST_SYSTEM_EXE_SUFFIX}")
+set(CMAKE_CXX_COMPILER "${TOOLCHAIN_BIN_PATH}/clang++${HOST_SYSTEM_EXE_SUFFIX}")
+
+set(OHOS_AR "${TOOLCHAIN_BIN_PATH}/llvm-ar${HOST_SYSTEM_EXE_SUFFIX}")
+set(OHOS_RANLIB "${TOOLCHAIN_BIN_PATH}/llvm-ranlib${HOST_SYSTEM_EXE_SUFFIX}")
+set(CMAKE_AR "${OHOS_AR}" CACHE FILEPATH "Archiver")
+set(CMAKE_RANLIB "${OHOS_RANLIB}" CACHE FILEPATH "Ranlib")
diff --git a/dsoftbus/build/ohos/ndk/cmake/sdk_native_platforms.cmake b/dsoftbus/build/ohos/ndk/cmake/sdk_native_platforms.cmake
new file mode 100755
index 0000000000000000000000000000000000000000..69af4021fee8870386e0d921e4dbfad6125aba3c
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/cmake/sdk_native_platforms.cmake
@@ -0,0 +1,15 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set(SDK_NATIVE_MIN_PLATFORM_LEVEL "1")
+set(SDK_NATIVE_MAX_PLATFORM_LEVEL "1")
\ No newline at end of file
diff --git a/dsoftbus/build/ohos/ndk/copy_notices_file.py b/dsoftbus/build/ohos/ndk/copy_notices_file.py
new file mode 100755
index 0000000000000000000000000000000000000000..21e17900f499c7a35783f42aa625926379515584
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/copy_notices_file.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+import shutil
+
+
+def create_dest_file(dest_dir):
+    # exist_ok makes this a no-op when the directory already exists.
+    os.makedirs(dest_dir, exist_ok=True)
+
+
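+# Notice files are named after the open source project when a non-empty
+# opensource name is given, and after the GN target name otherwise.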
+def get_file_name(target_name, opensource_name):
+ file_name = ''
+ if not opensource_name.strip():
+ file_name = '{}.txt'.format(target_name)
+ else:
+ file_name = '{}.txt'.format(opensource_name)
+ return file_name
+
+
+def merge_multi_notices(notice_root_dir,
+ module_notices,
+ target_name,
+ opensource_name):
+
+ dest_file = os.path.join(notice_root_dir,
+ get_file_name(target_name, opensource_name))
+
+ with open(dest_file, 'a') as dest_notice:
+ for notice in module_notices:
+ if os.path.exists(notice):
+ with open(notice, 'r', errors='ignore') as source_notice:
+ for line in source_notice.readlines():
+ dest_notice.write(line)
+ dest_notice.write(u'\n\n')
+
+
+def copy_notice_file(root_out_dir,
+ module_notices,
+ target_name,
+ opensource_name):
+ nf_dest_dir = os.path.join(root_out_dir, 'NOTICE_FILE/system')
+ create_dest_file(nf_dest_dir)
+
+    # If the module has multiple notices, merge them into one file.
+ if len(module_notices) > 1:
+ merge_multi_notices(nf_dest_dir,
+ module_notices,
+ target_name,
+ opensource_name)
+ else:
+ for notice in module_notices:
+ if os.path.exists(notice):
+ file_name = get_file_name(target_name, opensource_name)
+ dest_file = os.path.join(nf_dest_dir, file_name)
+ shutil.copy(notice, dest_file)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+    parser.add_argument('--root-out-dir', help='root out directory', required=True)
+    parser.add_argument('--target-name', help='name of the module target', required=True)
+    parser.add_argument('--opensource-name', help='name of the open source project', required=True)
+    parser.add_argument('--module-notices', nargs='+', help='notice files of the module', required=True)
+ args = parser.parse_args()
+
+ copy_notice_file(args.root_out_dir,
+ args.module_notices,
+ args.target_name,
+ args.opensource_name)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/ndk/create_ndk_docs_portal.py b/dsoftbus/build/ohos/ndk/create_ndk_docs_portal.py
new file mode 100755
index 0000000000000000000000000000000000000000..f3b99a930dfc69d32e4122f5e19fe2c67ee9aaa1
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/create_ndk_docs_portal.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import optparse
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+from scripts.util import build_utils # noqa: E402
+
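+# Contents of the docs portal page: a minimal html page that immediately
+# redirects to the doxygen-generated index (filled in below via %s).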
+PORTAL_CONTENTS = """<html>
+<head>
+<meta http-equiv="refresh" content="0; url=%s" />
+</head>
+</html>
+"""
+
+
+def parse_args(args):
+ args = build_utils.expand_file_args(args)
+
+ parser = optparse.OptionParser()
+ build_utils.add_depfile_option(parser)
+ parser.add_option('--portal-path', help='path to portal html')
+ parser.add_option('--doxygen-output', help='ndk doxygen outputs')
+ parser.add_option('--record-path', help='path to md5.stamp file')
+ parser.add_option('--docs-archive', help='path of docs archive zipfile')
+ parser.add_option(
+ '--archive-doc',
+ default=False,
+ action='store_true',
+ help='whether to archive doc or not')
+
+ options, _ = parser.parse_args(args)
+ return options
+
+
+def write_portal_and_archive(options):
+    # If the user doesn't have doxygen installed, no documents are generated,
+    # so there is no need to generate the portal html.
+ if not os.path.exists(options.doxygen_output):
+ return
+
+ contents = [
+ PORTAL_CONTENTS % os.path.relpath(options.doxygen_output,
+ os.path.dirname(options.portal_path))
+ ]
+ with open(options.portal_path, 'w') as f:
+ f.write('\n'.join(contents))
+
+ if options.archive_doc:
+ os.makedirs(os.path.dirname(options.docs_archive), exist_ok=True)
+ build_utils.zip_dir(
+ options.docs_archive,
+ os.path.dirname(options.portal_path),
+ compress_fn=lambda _: True)
+
+
+def main(args):
+ options = parse_args(args)
+
+ depfile_deps = set()
+ if os.path.exists(options.doxygen_output):
+ depfile_deps.add(options.doxygen_output)
+ outputs = [options.portal_path]
+ if options.docs_archive:
+ outputs.append(options.docs_archive)
+
+ build_utils.call_and_write_depfile_if_stale(
+ lambda: write_portal_and_archive(options),
+ options,
+ depfile_deps=depfile_deps,
+ input_paths=depfile_deps,
+ input_strings=[options.archive_doc],
+ output_paths=(outputs),
+ record_path=options.record_path,
+ force=False,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/ohos/ndk/create_ndk_docs_portal.pydeps b/dsoftbus/build/ohos/ndk/create_ndk_docs_portal.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..8bc4c7482017fcd66649d8135995792587e193aa
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/create_ndk_docs_portal.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/ndk --output build/ohos/ndk/create_ndk_docs_portal.pydeps build/ohos/ndk/create_ndk_docs_portal.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+create_ndk_docs_portal.py
diff --git a/dsoftbus/build/ohos/ndk/generate_ndk_docs.py b/dsoftbus/build/ohos/ndk/generate_ndk_docs.py
new file mode 100755
index 0000000000000000000000000000000000000000..811d7d97589ff736f556d79013e3a3558fb0970d
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/generate_ndk_docs.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import optparse
+import os
+import sys
+import re
+import tempfile
+import distutils.spawn
+import shutil
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+from scripts.util import build_utils # noqa: E402
+
+
+def parse_args(args):
+ args = build_utils.expand_file_args(args)
+
+ parser = optparse.OptionParser()
+ build_utils.add_depfile_option(parser)
+ parser.add_option('--version', help='software version')
+ parser.add_option('--doxygen-file', help='doxygen config file')
+ parser.add_option('--output', help='output index.html')
+ parser.add_option('--record-path', help='path to md5.stamp file')
+ parser.add_option(
+ '--working-dir',
+ help='the directory where doxygen command will be executed')
+
+ options, _ = parser.parse_args(args)
+ return options
+
+
+def generate_ndk_docs(options, html_output_dir):
+ contents = None
+ with tempfile.NamedTemporaryFile(
+ suffix=os.path.basename(options.doxygen_file)) as doxygen_file:
+ shutil.copyfile(options.doxygen_file, doxygen_file.name)
+ with open(doxygen_file.name, 'r') as f:
+ contents = f.read()
+ if contents is None:
+ raise Exception('Failed to read %s' % options.doxygen_file)
+
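+    # %VERSION% and %OUTPUT_DIR% are placeholders in the checked-in Doxyfile
+    # (used for PROJECT_NUMBER and HTML_OUTPUT); substitute the real version
+    # string and the html output directory.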
+ keys = {
+ '%VERSION%':
+ options.version,
+ '%OUTPUT_DIR%':
+ os.path.relpath(html_output_dir, options.working_dir)
+ }
+ for (k, v) in list(keys.items()):
+ v = v.replace('\\', '\\\\')
+ contents = re.sub(k, v, contents)
+
+ with open(doxygen_file.name, 'w') as f:
+ f.write(contents)
+
+ old_cwd = os.getcwd()
+ try:
+        # If no NDK headers exist, there is nothing to document.
+        if not os.path.exists(options.working_dir):
+ print("no ndk headers exist, return")
+ return
+ os.chdir(options.working_dir)
+
+ doxygen_path = distutils.spawn.find_executable('doxygen')
+ if doxygen_path is None:
+ print(
+ "Warning: Failed to find doxygen, please install doxygen with \"sudo apt-get install doxygen\" on Ubuntu"
+ )
+ return
+ os.makedirs(
+ os.path.relpath(html_output_dir, options.working_dir),
+ exist_ok=True)
+ cmd = [doxygen_path, doxygen_file.name]
+ build_utils.check_output(cmd)
+ finally:
+ os.chdir(old_cwd)
+
+
+def main(args):
+ options = parse_args(args)
+
+ depfile_deps = ([options.doxygen_file])
+ for root, _, filenames in os.walk(options.working_dir):
+ for f in filenames:
+ depfile_deps += ([os.path.join(root, f)])
+
+ html_output_dir = options.output
+ build_utils.call_and_write_depfile_if_stale(
+ lambda: generate_ndk_docs(options, html_output_dir),
+ options,
+ depfile_deps=depfile_deps,
+ input_paths=depfile_deps,
+ input_strings=[options.version, options.working_dir],
+ output_paths=([html_output_dir]),
+ record_path=options.record_path,
+ force=False,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/ohos/ndk/generate_ndk_docs.pydeps b/dsoftbus/build/ohos/ndk/generate_ndk_docs.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..2605a6f28b902a3f4e536cb35f438c5f8133bccc
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/generate_ndk_docs.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/ndk --output build/ohos/ndk/generate_ndk_docs.pydeps build/ohos/ndk/generate_ndk_docs.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+generate_ndk_docs.py
diff --git a/dsoftbus/build/ohos/ndk/generate_ndk_stub_file.py b/dsoftbus/build/ohos/ndk/generate_ndk_stub_file.py
new file mode 100755
index 0000000000000000000000000000000000000000..a39f8dd925367b466b569d723a76566fbce3250b
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/generate_ndk_stub_file.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import optparse
+import os
+import sys
+import json
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+from scripts.util import build_utils # noqa: E402
+
+STUB_FUNCTION_TEMPLATE = '''
+void {}() {{ }}
+'''
+
+STUB_VARIABLE_TEMPLATE = '''
+int {} = 0;
+'''
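+# For example, a description entry {"name": "OH_Foo"} produces the stub
+# `void OH_Foo() { }`, while {"name": "OH_bar", "type": "variable"} produces
+# `int OH_bar = 0;`.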
+
+
+def parse_args(args):
+ args = build_utils.expand_file_args(args)
+
+ parser = optparse.OptionParser()
+ build_utils.add_depfile_option(parser)
+ parser.add_option('--output', help='generated ndk stub file')
+ parser.add_option('--ndk-description-file', help='ndk description file')
+
+ options, _ = parser.parse_args(args)
+ return options
+
+
+def generate_stub_file(options):
+ contents = []
+ with open(options.ndk_description_file, 'r') as f:
+ interfaces = json.load(f)
+ for inf in interfaces:
+ name = inf.get('name')
+ if inf.get('type') == 'variable':
+ contents.append(STUB_VARIABLE_TEMPLATE.format(name))
+ else:
+ contents.append(STUB_FUNCTION_TEMPLATE.format(name))
+ with open(options.output, 'w') as f:
+ f.write('\n'.join(contents))
+
+
+def main(args):
+ options = parse_args(args)
+
+ depfile_deps = ([options.ndk_description_file])
+
+ build_utils.call_and_write_depfile_if_stale(
+ lambda: generate_stub_file(options),
+ options,
+ depfile_deps=depfile_deps,
+ input_paths=depfile_deps,
+ output_paths=([options.output]),
+ force=False,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/ohos/ndk/generate_ndk_stub_file.pydeps b/dsoftbus/build/ohos/ndk/generate_ndk_stub_file.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..7575e06a0ef8f0d2ca0b4e943476014f40dffe96
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/generate_ndk_stub_file.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/ndk --output build/ohos/ndk/generate_ndk_stub_file.pydeps build/ohos/ndk/generate_ndk_stub_file.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+generate_ndk_stub_file.py
diff --git a/dsoftbus/build/ohos/ndk/generate_version_script.py b/dsoftbus/build/ohos/ndk/generate_version_script.py
new file mode 100755
index 0000000000000000000000000000000000000000..eb778dfa6ded1963b07893fbc59c2673e23100aa
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/generate_version_script.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import optparse
+import os
+import sys
+import json
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+from scripts.util import build_utils # noqa: E402
+
+MAP_FILE_TEMPLATE = '''
+%s {
+ global:
+ %s
+ local:
+ *;
+};
+'''
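+# For a shared library named "foo" exporting OH_Foo and OH_Bar, the generated
+# version script looks roughly like:
+#
+#   FOO {
+#       global:
+#           OH_Foo;
+#           OH_Bar;
+#       local:
+#           *;
+#   };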
+
+
+def parse_args(args):
+ args = build_utils.expand_file_args(args)
+
+ parser = optparse.OptionParser()
+ build_utils.add_depfile_option(parser)
+ parser.add_option('--output', help='generated ndk stub file')
+ parser.add_option('--ndk-description-file', help='ndk description file')
+ parser.add_option('--shlib-name', help='output name of shared library')
+
+ options, _ = parser.parse_args(args)
+ return options
+
+
+def generate_version_script(options):
+ contents = []
+ with open(options.ndk_description_file, 'r') as f:
+ interfaces = json.load(f)
+ for inf in interfaces:
+ name = inf['name']
+ contents.append('\t%s;' % name)
+ with open(options.output, 'w') as f:
+ f.write(MAP_FILE_TEMPLATE %
+ (options.shlib_name.upper(), '\n'.join(contents)))
+
+
+def main(args):
+ options = parse_args(args)
+
+ depfile_deps = ([options.ndk_description_file])
+
+ build_utils.call_and_write_depfile_if_stale(
+ lambda: generate_version_script(options),
+ options,
+ depfile_deps=depfile_deps,
+ input_paths=depfile_deps,
+ input_strings=[options.shlib_name],
+ output_paths=([options.output]),
+ force=False,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/ohos/ndk/generate_version_script.pydeps b/dsoftbus/build/ohos/ndk/generate_version_script.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..90d9964a9fb37c995089ed3af9ff2c1690eaa4b4
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/generate_version_script.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/ndk --output build/ohos/ndk/generate_version_script.pydeps build/ohos/ndk/generate_version_script.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+generate_version_script.py
diff --git a/dsoftbus/build/ohos/ndk/ndk.gni b/dsoftbus/build/ohos/ndk/ndk.gni
new file mode 100755
index 0000000000000000000000000000000000000000..ce4d5564dd07933b5ffe427e67b04e2cba8320f7
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/ndk.gni
@@ -0,0 +1,448 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/ohos/copy_ex.gni")
+import("//build/config/python.gni")
+import("//build/ohos/build_var.gni")
+import("//build/ohos/notice/notice.gni")
+import("//build/ohos_var.gni")
+
+declare_args() {
+ sdk_native = "sdk-native"
+ version_script_suffix = ".map.txt"
+ ndk_signature_save_dir = "//interface/sdk-native"
+ ndk_zip_prefix = "native"
+}
+
+ndk_os_irrelevant_out_dir = "$root_out_dir/${sdk_native}/os-irrelevant"
+ndk_os_specific_out_dir = "$root_out_dir/${sdk_native}/os-specific"
+ndk_signature_out_dir = "$root_out_dir/${sdk_native}/signature"
+
+ndk_headers_out_dir = "$ndk_os_irrelevant_out_dir/sysroot/usr/include"
+ndk_libraries_out_dir = "$ndk_os_irrelevant_out_dir/sysroot/usr/lib"
+ndk_docs_out_dir = "$ndk_os_irrelevant_out_dir/docs"
+
+windows_system = "windows"
+linux_system = "linux"
+darwin_system = "darwin"
+
+ndk_windows_specific_out_dir = "${ndk_os_specific_out_dir}/${windows_system}"
+ndk_darwin_specific_out_dir = "${ndk_os_specific_out_dir}/${darwin_system}"
+ndk_linux_specific_out_dir = "${ndk_os_specific_out_dir}/${linux_system}"
+
+ndk_windows_toolchains_out_dir = "${ndk_windows_specific_out_dir}/llvm"
+ndk_windows_tools_out_dir = "${ndk_windows_specific_out_dir}/build-tools"
+
+ndk_darwin_toolchains_out_dir = "${ndk_darwin_specific_out_dir}/llvm"
+ndk_darwin_tools_out_dir = "${ndk_darwin_specific_out_dir}/build-tools"
+
+ndk_linux_toolchains_out_dir = "${ndk_linux_specific_out_dir}/llvm"
+ndk_linux_tools_out_dir = "${ndk_linux_specific_out_dir}/build-tools"
+
+# Generate an NDK library from an NDK description file.
+#
+# Variables:
+# ndk_description_file: the NDK interface description file.
+# min_compact_version: string that specifies the minimal compatible version
+# of the NDK. Set to major_version by default.
+#
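+# A minimal usage sketch (the label and file names below are hypothetical):
+#
+# ohos_ndk_library("libexample_ndk") {
+#   ndk_description_file = "./libexample.ndk.json"
+#   min_compact_version = "1"
+#   output_name = "example"
+# }
+#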
+template("ohos_ndk_library") {
+ forward_variables_from(invoker, [ "testonly" ])
+ assert(defined(invoker.ndk_description_file),
+ "ndk description file is necessary ")
+
+ _ndk_description_file = invoker.ndk_description_file
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps += invoker.deps
+ }
+
+ _ndk_config_output = "$target_gen_dir/$target_name.build_config"
+ _sdk_version = exec_script("//build/ohos/version.py",
+ [
+ "--version",
+ sdk_version,
+ ],
+ "list lines")
+ _min_compact_version = _sdk_version[0]
+ if (defined(invoker.min_compact_version)) {
+ _min_compact_version = invoker.min_compact_version
+ }
+ assert(_min_compact_version != "0") # mark as used
+
+ _output_name = target_name
+ if (defined(invoker.output_name)) {
+ _output_name = invoker.output_name
+ }
+
+ _output_extension = "z.so"
+ if (defined(invoker.output_extension)) {
+ _output_extension = invoker.output_extension
+ }
+
+ _ndk_stub_target = "${target_name}__ndk_stub"
+ _generated_ndk_stub_file = target_gen_dir + "/${target_name}.ndk/" +
+ get_path_info(_ndk_description_file, "name") + ".c"
+ action_with_pydeps(_ndk_stub_target) {
+ deps = _deps
+ script = "//build/ohos/ndk/generate_ndk_stub_file.py"
+ depfile = "${target_gen_dir}/${target_name}.d"
+ args = [
+ "--output",
+ rebase_path(_generated_ndk_stub_file, root_build_dir),
+ "--ndk-description-file",
+ rebase_path(_ndk_description_file, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+ inputs = [ _ndk_description_file ]
+ outputs = [ _generated_ndk_stub_file ]
+ }
+
+ _ndk_config_target = "${target_name}__ndk_config"
+ generated_file(_ndk_config_target) {
+ deps = [ ":$_ndk_stub_target" ]
+ output_conversion = "json"
+ outputs = [ _ndk_config_output ]
+
+ data_keys = [ "ndk_config" ]
+ }
+
+ if (build_ohos_ndk) {
+ ndk_toolchains = [
+ "//build/toolchain/ohos:ohos_clang_arm",
+ "//build/toolchain/ohos:ohos_clang_arm64",
+ ]
+ } else {
+ # Don't enable cross compiling if build_ohos_ndk is false.
+ # Cross compiling in this case may cause build failures in some scenarios,
+ # such as building for ASAN.
+ ndk_toolchains = [ "//build/toolchain/ohos:ohos_clang_${target_cpu}" ]
+ }
+
+ _accumulated_deps = []
+
+ foreach(_toolchain, ndk_toolchains) {
+ if (_toolchain == "//build/toolchain/ohos:ohos_clang_arm") {
+ _ndk_shlib_directory = "arm-linux-ohos"
+ } else if (_toolchain == "//build/toolchain/ohos:ohos_clang_arm64") {
+ _ndk_shlib_directory = "aarch64-linux-ohos"
+ } else if (_toolchain == "//build/toolchain/ohos:ohos_clang_x86_64") {
+ _ndk_shlib_directory = "x86_64-linux-ohos"
+ }
+
+ assert(defined(_ndk_shlib_directory))
+ _output_dir = "$ndk_libraries_out_dir/$_ndk_shlib_directory"
+ _output_ndk_shlib = "${_output_dir}/lib${_output_name}.${_output_extension}"
+
+ _toolchain_name = get_label_info(_toolchain, "name")
+
+ _ndk_shlib_target = "${target_name}_${_toolchain_name}__ndk_shlib"
+
+ shared_library(_ndk_shlib_target) {
+ forward_variables_from(invoker,
+ [
+ "cflags",
+ "ldflags",
+ "configs",
+ "libs",
+ "include_dirs",
+ ])
+ deps = [ ":$_ndk_stub_target" ]
+ sources = [ _generated_ndk_stub_file ]
+ output_dir = target_out_dir + "/$_toolchain_name"
+ output_name = _output_name
+ output_extension = _output_extension
+ }
+
+ _ndk_shlib_copy_target = "${target_name}_${_toolchain_name}__copy"
+ copy(_ndk_shlib_copy_target) {
+ deps = [ ":$_ndk_shlib_target($_toolchain)" ]
+ sources = [ get_label_info(":$_ndk_shlib_target($_toolchain)",
+ "target_out_dir") +
+ "/$_toolchain_name/lib$_output_name.$_output_extension" ]
+ outputs = [ _output_ndk_shlib ]
+ }
+ _accumulated_deps += [ ":$_ndk_shlib_copy_target" ]
+ _accumulated_deps += [ ":$_ndk_shlib_target" ]
+ }
+
+ _ndk_version_script_target = target_name
+ if (current_toolchain == default_toolchain) {
+ # Notice files for different toolchains are the same, so it's enough to
+ # collect the notice file for the default toolchain.
+ _notice_target = "${target_name}__ndk_libraries_notice"
+ collect_notice(_notice_target) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "license_as_sources",
+ "license_file",
+ ])
+ module_source_dir =
+ get_label_info(":${_ndk_version_script_target}", "dir")
+ outputs = [ "$ndk_notice_dir/sysroot/usr/lib/lib$_output_name.$_output_extension.txt" ]
+ }
+ _accumulated_deps += [ ":$_notice_target" ]
+ }
+ if (defined(invoker.license_file)) {
+ not_needed(invoker, [ "license_file" ])
+ }
+ if (defined(invoker.license_as_sources)) {
+ not_needed(invoker, [ "license_as_sources" ])
+ }
+
+ _generated_version_script =
+ target_gen_dir + "/$target_name" + version_script_suffix
+ action_with_pydeps(_ndk_version_script_target) {
+ deps = _accumulated_deps
+ script = "//build/ohos/ndk/generate_version_script.py"
+ depfile = "${target_gen_dir}/${target_name}.d"
+ args = [
+ "--output",
+ rebase_path(_generated_version_script, root_build_dir),
+ "--ndk-description-file",
+ rebase_path(_ndk_description_file, root_build_dir),
+ "--shlib-name",
+ _output_name,
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+ outputs = [ _generated_version_script ]
+ }
+}
+
+# Specify an NDK copy target.
+# NOTE: This is an internal template, not designed for general use.
+#
+# Input variables:
+# dest_dir: Root directory where sources are copied to.
+# sources: List of files and directories to copy to ${dest_dir}.
+#
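+# A minimal usage sketch (names and paths are hypothetical):
+#
+# ohos_ndk_copy("copy_example_docs") {
+#   dest_dir = "$ndk_docs_out_dir"
+#   sources = [ "./docs" ]
+# }
+#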
+template("ohos_ndk_copy") {
+ assert(defined(invoker.sources) && defined(invoker.dest_dir),
+ "sources and dest_dir are necessary ")
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps += invoker.deps
+ }
+ _dest = invoker.dest_dir
+
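+ # Use the sources assignment filter to detect whether _dest lives under
+ # the os-irrelevant output directory: if the filter drops _dest, notice
+ # outputs are rebased against ndk_os_irrelevant_out_dir.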
+ set_sources_assignment_filter([ "*os-irrelevant*" ])
+ sources = [ _dest ]
+ if (sources == []) {
+ _notice_rel_dir = ndk_os_irrelevant_out_dir
+ } else {
+ _notice_rel_dir = ndk_os_specific_out_dir
+ }
+ set_sources_assignment_filter([])
+ sources = []
+
+ _main_target_name = target_name
+ _notice_target = "${target_name}__notice"
+ collect_notice(_notice_target) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "license_as_sources",
+ "license_file",
+ ])
+ module_source_dir = get_label_info(":${_main_target_name}", "dir")
+ outputs = []
+
+ foreach(s, invoker.sources) {
+ outputs += [ ndk_notice_dir + "/" + rebase_path(_dest, _notice_rel_dir) +
+ "/" + get_path_info(s, "file") + ".txt" ]
+ }
+ }
+ _deps += [ ":$_notice_target" ]
+
+ copy_ex(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ ])
+ forward_variables_from(invoker, [ "outputs" ])
+ deps = _deps
+ sources = invoker.sources
+
+ if (!defined(outputs)) {
+ outputs = []
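+ # Expand each source via find.py: a plain file yields a single output,
+ # while a directory yields one output per file found inside it.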
+ foreach(src, invoker.sources) {
+ _all_files = []
+ _all_files =
+ exec_script("//build/scripts/find.py",
+ [
+ rebase_path(src, root_build_dir),
+ "--base-dir=" + rebase_path(src, root_build_dir),
+ "--return-relpath",
+ "--follow-symlinks",
+ ],
+ "list lines")
+
+ if (_all_files == [ "." ]) {
+ outputs += [ _dest + "/" + get_path_info(src, "file") ]
+ } else {
+ foreach(f, _all_files) {
+ outputs += [ _dest + "/" + get_path_info(src, "name") + "/$f" ]
+ }
+ }
+ }
+ }
+
+ dest = _dest
+ depfile = "$target_gen_dir/$target_name.d"
+ args = [
+ "--clear",
+ "--follow-outside-symlinks",
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--stamp",
+ rebase_path("$target_gen_dir/$target_name.stamp", root_build_dir),
+ ]
+ if (defined(invoker.args)) {
+ args += invoker.args
+ }
+ }
+}
+
+# Specify NDK header files.
+#
+# Input variables:
+# dest_dir: Root directory where sources are copied to.
+# sources: List of files and directories to copy to ${dest_dir}.
+#
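+# A minimal usage sketch (names and paths are hypothetical):
+#
+# ohos_ndk_headers("example_headers") {
+#   dest_dir = "$ndk_headers_out_dir/example"
+#   sources = [ "./include/example.h" ]
+# }
+#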
+template("ohos_ndk_headers") {
+ assert(defined(invoker.sources), "sources is required.")
+
+ if (defined(invoker.dest_dir)) {
+ _dest_dir = invoker.dest_dir
+ } else {
+ _dest_dir = "$ndk_headers_out_dir"
+ }
+
+ _ndk_header_signature_target = "${target_name}__ndk_signature_check"
+ _target_name = target_name
+ action_with_pydeps(_ndk_header_signature_target) {
+ if (defined(invoker.deps)) {
+ deps = invoker.deps
+ }
+
+ script = "//build/ohos/ndk/check_ndk_header_signature.py"
+ depfile = "${target_gen_dir}/${target_name}.d"
+
+ inputs = []
+ foreach(src, invoker.sources) {
+ _all_files = []
+ _all_files = exec_script("//build/scripts/find.py",
+ [ rebase_path(src) ],
+ "list lines")
+
+ inputs += _all_files
+ }
+
+ _output = "$target_gen_dir/$target_name.stamp"
+
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--generated-signature",
+ rebase_path("$ndk_signature_out_dir/$_target_name/signature.txt",
+ root_build_dir),
+ "--saved-signature",
+ rebase_path("$ndk_signature_save_dir/$_target_name/signature.txt",
+ root_build_dir),
+ "--output",
+ rebase_path(_output, root_build_dir),
+ ]
+ foreach(f, inputs) {
+ args += [
+ "--headers",
+ rebase_path(f, root_build_dir),
+ "--root-build-dir",
+ rebase_path("//", root_build_dir),
+ ]
+ }
+
+ if (check_ndk_signature) {
+ args += [ "--check-signature" ]
+ }
+
+ outputs = [ _output ]
+ }
+
+ ohos_ndk_copy(target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "deps",
+ "args",
+ "dest_dir",
+ ])
+ deps = [ ":$_ndk_header_signature_target" ]
+
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ dest_dir = _dest_dir
+
+ args = [ "--ignore-stale" ]
+ if (defined(invoker.args)) {
+ args += invoker.args
+ }
+ }
+}
+
+# Specify NDK toolchains.
+#
+# Input variables:
+# dest_dir: Root directory where sources are copied to.
+# sources: List of files and directories to copy to ${dest_dir}.
+#
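+# A minimal usage sketch (the source path is hypothetical;
+# ndk_linux_toolchains_out_dir is defined in ndk.gni):
+#
+# ohos_ndk_toolchains("example_llvm") {
+#   dest_dir = ndk_linux_toolchains_out_dir
+#   sources = [ "//prebuilts/example/llvm" ]
+# }
+#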
+template("ohos_ndk_toolchains") {
+ ohos_ndk_copy(target_name) {
+ forward_variables_from(invoker, "*")
+ }
+}
+
+# Specify an NDK prebuilt library.
+#
+# Input variables:
+# dest_dir: Root directory where sources are copied to.
+# sources: List of files and directories to copy to ${dest_dir}.
+#
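+# A minimal usage sketch (names are hypothetical); dest_dir defaults to
+# ndk_libraries_out_dir when omitted:
+#
+# ohos_ndk_prebuilt_library("libexample_prebuilt") {
+#   sources = [ "./libexample.a" ]
+# }
+#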
+template("ohos_ndk_prebuilt_library") {
+ if (defined(invoker.dest_dir)) {
+ _dest_dir = invoker.dest_dir
+ } else {
+ _dest_dir = "$ndk_libraries_out_dir"
+ }
+
+ ohos_ndk_copy(target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "args",
+ "dest_dir",
+ ])
+ dest_dir = _dest_dir
+
+ args = [ "--ignore-stale" ]
+ if (defined(invoker.args)) {
+ args += invoker.args
+ }
+ }
+}
diff --git a/dsoftbus/build/ohos/ndk/scan_ndk_targets.py b/dsoftbus/build/ohos/ndk/scan_ndk_targets.py
new file mode 100755
index 0000000000000000000000000000000000000000..6bc3a7eefc391a04f63fdc175600466ed1bd2a74
--- /dev/null
+++ b/dsoftbus/build/ohos/ndk/scan_ndk_targets.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+import os
+import subprocess
+import fnmatch
+import glob
+import re
+import errno
+import codecs
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+from scripts.util import build_utils # noqa: E402,E501 pylint: disable=E0401,C0413
+
+ALL_NDK_TARGETS_TEMPLATE = '''
+group("all_ndk_targets") {{
+ deps = [ {} ]
+}}
+'''
+
+ALL_NDK_TEMPLATES_NAMES = [
+ "ohos_ndk_headers", "ohos_ndk_library",
+ "ohos_ndk_prebuilt_library", "ohos_ndk_copy"
+]
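+
+# With a //foo/BUILD.gn defining ohos_ndk_headers("foo_headers"), the
+# generated file would look like this (illustrative):
+#
+# group("all_ndk_targets") {
+#   deps = [ "//foo:foo_headers", ]
+# }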
+
+
+def remove_comment(file):
+ contents = []
+ with open(file, 'r') as in_file:
+ for line in in_file:
+ # Strip comments in gn file.
+ # If someone uses # as part of a string, ignore it.
+ if re.match(r'.*?("\S*#\S*")', line):
+ pass
+ else:
+ line = re.sub(r'(#.*?)\n', '', line)
+ contents.append(line)
+ return contents
+
+
+def do_dos2unix(in_file, out_file):
+ contents = ''
+ with open(in_file, 'r+b') as fin:
+ contents = fin.read()
+ # Remove BOM header.
+ if contents.startswith(codecs.BOM_UTF8):
+ contents = contents[len(codecs.BOM_UTF8):]
+ contents = re.sub(r'\r\n', '\n', contents.decode())
+ with open(out_file, 'w') as fout:
+ fout.write(contents)
+
+
+def do_gn_format(gn_file, org_file):
+ cmd = ['gn', 'format', gn_file]
+ child = subprocess.Popen(cmd)
+ child.communicate()
+ if child.returncode:
+ print(
+ 'Error: Something is wrong with {}, please check file encoding or format'
+ .format(org_file))
+
+
+def get_ndk_targets(file, options):
+ ndk_targets = []
+ with build_utils.temp_dir() as tmp:
+ gn_file = os.path.join(tmp, os.path.basename(file))
+ do_dos2unix(file, gn_file)
+ do_gn_format(gn_file, file)
+ contents = remove_comment(gn_file)
+ for template_name in ALL_NDK_TEMPLATES_NAMES:
+ pattern = re.escape(template_name) + r"\(\"(.*)\"\)"
+ targets = re.findall(pattern, ''.join(contents))
+ for target in targets:
+ ndk_targets.append('\"//{}:{}\",'.format(
+ os.path.relpath(os.path.dirname(file), options.root_dir),
+ target))
+ return ndk_targets
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--output', required=True)
+ parser.add_argument('--root-dir', required=True)
+
+ options = parser.parse_args()
+
+ paths = glob.glob(os.path.join(options.root_dir, "*"))
+ dirs = [
+ d for d in paths if os.path.isdir(d)
+ and not fnmatch.fnmatch(d, os.path.join(options.root_dir, ".repo"))
+ and not fnmatch.fnmatch(d, os.path.join(options.root_dir, "out"))
+ ]
+
+ gn_list = []
+ for d in dirs:
+ gn_list += glob.glob(os.path.join(d, "**/BUILD.gn"), recursive=True)
+
+ ndk_targets = []
+ for gn_file in gn_list:
+ # Skip broken symlinks.
+ try:
+ os.stat(gn_file)
+ except OSError as err:
+ if err.errno == errno.ENOENT:
+ continue
+ else:
+ raise Exception("Error: failed to stat {}".format(gn_file))
+ ndk_targets.extend(get_ndk_targets(gn_file, options))
+
+ ndk_contents = ALL_NDK_TARGETS_TEMPLATE.format('\n'.join(ndk_targets))
+
+ os.makedirs(os.path.dirname(options.output), exist_ok=True)
+ with open(options.output, 'w') as f:
+ f.write(ndk_contents)
+
+ # Call gn format to make the output gn file prettier.
+ cmd = ['gn', 'format', options.output]
+ subprocess.check_output(cmd)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/notice/collect_module_notice_file.py b/dsoftbus/build/ohos/notice/collect_module_notice_file.py
new file mode 100755
index 0000000000000000000000000000000000000000..d5ae5a7501f040b135279a4d0ef3ad868f325432
--- /dev/null
+++ b/dsoftbus/build/ohos/notice/collect_module_notice_file.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+import shutil
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+from scripts.util.file_utils import read_json_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+README_FILE_NAME = 'README.OpenSource'
+LICENSE_CANDIDATES = [
+ 'LICENSE',
+ 'License',
+ 'NOTICE',
+ 'Notice',
+ 'COPYRIGHT',
+ 'Copyright'
+]
+
+
+def is_top_dir(current_dir):
+ return os.path.exists(os.path.join(current_dir, '.gn'))
+
+
+def find_license_recursively(current_dir, default_license):
+ if is_top_dir(current_dir):
+ return default_license
+ for file in LICENSE_CANDIDATES:
+ candidate = os.path.join(current_dir, file)
+ if os.path.exists(candidate):
+ return candidate
+ return find_license_recursively(os.path.dirname(current_dir),
+ default_license)
+
+
+def get_license_from_readme(readme_path):
+ contents = read_json_file(readme_path)
+ if contents is None:
+ raise Exception("Error: failed to read {}.".format(readme_path))
+
+ notice_file = contents[0].get('License File')
+ if notice_file is None or notice_file.strip() == '':
+ raise Exception("Error: value of notice file is empty in {}.".format(
+ readme_path))
+ notice_file = notice_file.strip()
+
+ return os.path.join(os.path.dirname(readme_path), notice_file)
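+
+# A README.OpenSource file is expected to be a JSON list whose first entry
+# provides the 'License File' key, e.g. (illustrative):
+# [
+#   {
+#     "Name": "example",
+#     "License File": "LICENSE"
+#   }
+# ]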
+
+
+def do_collect_notice_files(options, depfiles):
+ notice_file = options.license_file
+ if notice_file is None:
+ readme_path = os.path.join(options.module_source_dir,
+ README_FILE_NAME)
+ if os.path.exists(readme_path):
+ depfiles.append(readme_path)
+ notice_file = get_license_from_readme(readme_path)
+
+ if notice_file is None:
+ notice_file = find_license_recursively(options.module_source_dir,
+ options.default_license)
+
+ if notice_file:
+ for output in options.output:
+ os.makedirs(os.path.dirname(output), exist_ok=True)
+ if os.path.exists(notice_file):
+ shutil.copy(notice_file, output)
+ else:
+ build_utils.touch(output)
+ depfiles.append(notice_file)
+
+
+def main(args):
+ args = build_utils.expand_file_args(args)
+
+ parser = argparse.ArgumentParser()
+ build_utils.add_depfile_option(parser)
+
+ parser.add_argument('--license-file', required=False)
+ parser.add_argument('--default-license', required=True)
+ parser.add_argument('--output', action='append', required=False)
+ parser.add_argument('--module-source-dir',
+ help='source directory of this module',
+ required=True)
+
+ options = parser.parse_args(args)
+ depfiles = []
+
+ do_collect_notice_files(options, depfiles)
+ if options.license_file:
+ depfiles.append(options.license_file)
+ build_utils.write_depfile(options.depfile, options.output[0], depfiles)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/ohos/notice/collect_module_notice_file.pydeps b/dsoftbus/build/ohos/notice/collect_module_notice_file.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..3a4fbd3b9b20da8fd95ba4dd852d83e10d43cfa7
--- /dev/null
+++ b/dsoftbus/build/ohos/notice/collect_module_notice_file.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/notice --output build/ohos/notice/collect_module_notice_file.pydeps build/ohos/notice/collect_module_notice_file.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+collect_module_notice_file.py
diff --git a/dsoftbus/build/ohos/notice/collect_system_notice_files.py b/dsoftbus/build/ohos/notice/collect_system_notice_files.py
new file mode 100755
index 0000000000000000000000000000000000000000..f5f936caba2b03347551a374a99ef42b81564c38
--- /dev/null
+++ b/dsoftbus/build/ohos/notice/collect_system_notice_files.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+import shutil
+import json
+import glob
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util import build_utils # noqa: E402
+
+
+def collect_notice_files(options, dest_dir, depfiles):
+ subsystem_info_files = []
+ with open(options.install_info_file, 'r') as file:
+ install_info = json.load(file)
+ for item in install_info:
+ subsystem_info_files.append(item['part_info_file'])
+ depfiles.extend(subsystem_info_files)
+
+ module_info_files = []
+ for subsystem_info in subsystem_info_files:
+ with open(subsystem_info, 'r') as file:
+ subsystem_info = json.load(file)
+ for item in subsystem_info:
+ module_info_files.append(item['module_info_file'])
+ depfiles.extend(module_info_files)
+
+ for module_info in module_info_files:
+ with open(module_info, 'r') as file:
+ module_info = json.load(file)
+ if 'notice' in module_info and module_info['type'] != "java_library":
+ notice_file = module_info['notice']
+ if not os.path.exists(notice_file) or os.stat(
+ notice_file).st_size == 0:
+ continue
+ dest = os.path.join(dest_dir,
+ "{}.txt".format(module_info['dest'][0]))
+ os.makedirs(os.path.dirname(dest), exist_ok=True)
+ shutil.copyfile(module_info['notice'], dest)
+ depfiles.append(module_info['notice'])
+
+ notice_files = build_utils.get_all_files(options.notice_root_dir)
+ depfiles.extend(notice_files)
+ for file in notice_files:
+ dest = os.path.join(dest_dir,
+ os.path.relpath(file, options.notice_root_dir))
+ os.makedirs(os.path.dirname(dest), exist_ok=True)
+ shutil.copyfile(file, dest)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--install-info-file', required=True)
+ parser.add_argument('--notice-root-dir', required=True)
+ parser.add_argument('--output-file', required=True)
+ parser.add_argument('--depfile', required=False)
+ args = parser.parse_args()
+
+ depfiles = [args.install_info_file]
+ with build_utils.temp_dir() as tmp:
+ collect_notice_files(args, tmp, depfiles)
+ build_utils.zip_dir(args.output_file, tmp)
+
+ if args.depfile:
+ build_utils.write_depfile(args.depfile,
+ args.output_file,
+ sorted(depfiles),
+ add_pydeps=False)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/notice/collect_system_notice_files.pydeps b/dsoftbus/build/ohos/notice/collect_system_notice_files.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..491e2f7e0276090e97b59e0ed9a70a0580fd68cf
--- /dev/null
+++ b/dsoftbus/build/ohos/notice/collect_system_notice_files.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/notice --output build/ohos/notice/collect_system_notice_files.pydeps build/ohos/notice/collect_system_notice_files.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+collect_system_notice_files.py
diff --git a/dsoftbus/build/ohos/notice/license b/dsoftbus/build/ohos/notice/license
new file mode 100644
index 0000000000000000000000000000000000000000..f433b1a53f5b830a205fd2df78e2b34974656c7b
--- /dev/null
+++ b/dsoftbus/build/ohos/notice/license
@@ -0,0 +1,177 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/dsoftbus/build/ohos/notice/merge_notice_files.py b/dsoftbus/build/ohos/notice/merge_notice_files.py
new file mode 100755
index 0000000000000000000000000000000000000000..879af7d37d38c2827bd4a25a185753d269010331
--- /dev/null
+++ b/dsoftbus/build/ohos/notice/merge_notice_files.py
@@ -0,0 +1,285 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+
+Usage: gen_notice_file --output-image-name system \
+ --notice-file-root xx/NOTICE_FILE \
+ --notice-file-install-path xx/system \
+ --output-title notice_title_string
+
+Generate the project notice files, including both text and xml files.
+
+"""
+from collections import defaultdict
+import argparse
+import hashlib
+import os
+import os.path
+import sys
+import gzip
+import shutil
+import glob
+import re
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util import build_utils # noqa: E402
+from scripts.util.file_utils import write_json_file # noqa: E402
+
+XML_ESCAPE_TABLE = {
+ "&": "&",
+ '"': """,
+ "'": "'",
+ ">": ">",
+ "<": "<",
+}
+
+
+def copy_static_library_notices(options, depfiles):
+ valid_notices = []
+ basenames = []
+ for file in build_utils.get_all_files(options.static_library_notice_dir):
+ if os.stat(file).st_size == 0:
+ continue
+ if not file.endswith('.a.txt'):
+ continue
+ notice_file_name = os.path.basename(file)
+ if notice_file_name not in basenames:
+ basenames.append(notice_file_name)
+ valid_notices.append(file)
+ depfiles.append(file)
+
+ for file in valid_notices:
+ if options.image_name == "system":
+ if options.target_cpu == "arm64" or options.target_cpu == "x64":
+ install_dir = "system/lib64"
+ elif options.target_cpu == "arm":
+ install_dir = "system/lib"
+ else:
+ continue
+ elif options.image_name == "sdk":
+ install_dir = "toolchains/lib"
+ elif options.image_name == "ndk":
+ install_dir = "sysroot/usr/lib"
+ else:
+ continue
+ dest = os.path.join(options.notice_root_dir, install_dir,
+ os.path.basename(file))
+ os.makedirs(os.path.dirname(dest), exist_ok=True)
+ shutil.copyfile(file, dest)
+
+
+def write_file(file, string):
+ print(string, file=file)
+
+
+def compute_hash(file):
+ sha256 = hashlib.sha256()
+ with open(file, 'rb') as file_fd:
+ for line in file_fd:
+ sha256.update(line)
+ return sha256.hexdigest()
+
+
+def get_entity(text):
+ return "".join(XML_ESCAPE_TABLE.get(c, c) for c in text)
+
+
+def generate_txt_notice_files(file_hash, input_dir, output_filename,
+ notice_title):
+ with open(output_filename, "w") as output_file:
+ write_file(output_file, notice_title)
+ for value in file_hash:
+ write_file(output_file, '=' * 60)
+ write_file(output_file, "Notices for file(s):")
+ for filename in value:
+ write_file(
+ output_file, '/{}'.format(
+ re.sub('.txt.*', '',
+ os.path.relpath(filename, input_dir))))
+ write_file(output_file, '-' * 60)
+ with open(value[0], errors='ignore') as temp_file_hd:
+ write_file(output_file, temp_file_hd.read())
+
+
+def generate_xml_notice_files(files_with_same_hash, input_dir,
+ output_filename):
+ id_table = {}
+ for file_key in files_with_same_hash.keys():
+ for filename in files_with_same_hash[file_key]:
+ id_table[filename] = file_key
+
+ with open(output_filename, "w") as output_file:
+ write_file(output_file, '<?xml version="1.0" encoding="utf-8"?>')
+ write_file(output_file, "<licenses>")
+
+ # Flatten the lists into a single filename list
+ sorted_filenames = sorted(id_table.keys())
+
+ # write out a table of contents
+ for filename in sorted_filenames:
+ stripped_filename = re.sub('.txt.*', '',
+ os.path.relpath(filename, input_dir))
+ write_file(
+ output_file, '<file-name contentId="%s">%s</file-name>' %
+ (id_table.get(filename), stripped_filename))
+
+ write_file(output_file, '')
+ write_file(output_file, '')
+
+ processed_file_keys = []
+ # write the notice file lists
+ for filename in sorted_filenames:
+ file_key = id_table.get(filename)
+ if file_key in processed_file_keys:
+ continue
+ processed_file_keys.append(file_key)
+
+ with open(filename, errors='ignore') as temp_file_hd:
+ write_file(
+ output_file,
+ '<file-content contentId="{}"><![CDATA[{}]]></file-content>'
+ .format(file_key, get_entity(temp_file_hd.read())))
+ write_file(output_file, '')
+
+ # Write the closing tag to complete the file.
+ write_file(output_file, "</licenses>")
+
+
+def compress_file_to_gz(src_file_name, gz_file_name):
+ with open(src_file_name, mode='rb') as src_file_fd:
+ with gzip.open(gz_file_name, mode='wb') as gz_file_fd:
+ gz_file_fd.writelines(src_file_fd)
+
+
+def handle_zipfile_notices(zip_file):
+ notice_file = '{}.txt'.format(zip_file[:-4])
+ with build_utils.temp_dir() as tmp_dir:
+ build_utils.extract_all(zip_file, tmp_dir, no_clobber=False)
+ files = build_utils.get_all_files(tmp_dir)
+ contents = []
+ for file in files:
+ with open(file, 'r') as fd:
+ data = fd.read()
+ if data not in contents:
+ contents.append(data)
+ with open(notice_file, 'w') as merged_notice:
+ merged_notice.write('\n\n'.join(contents))
+ return notice_file
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--image-name')
+ parser.add_argument('--collected-notice-zipfile',
+ action='append',
+ help='zipfiles storing collected notice files')
+ parser.add_argument('--notice-root-dir', help='where notice files store')
+ parser.add_argument('--output-notice-txt', help='output notice.txt')
+ parser.add_argument('--output-notice-gz', help='output notice.xml.gz')
+ parser.add_argument('--notice-title', help='title of notice.txt')
+ parser.add_argument('--static-library-notice-dir',
+ help='path to static library notice files')
+ parser.add_argument('--target-cpu', help='cpu arch')
+ parser.add_argument('--depfile', help='depfile')
+ parser.add_argument('--notice-module-info',
+ help='module info file for notice target')
+ parser.add_argument('--notice-install-dir',
+ help='install directories of notice file')
+
+ args = parser.parse_args()
+
+ notice_dir = args.notice_root_dir
+ depfiles = []
+ if args.collected_notice_zipfile:
+ for zip_file in args.collected_notice_zipfile:
+ build_utils.extract_all(zip_file, notice_dir, no_clobber=False)
+ else:
+ depfiles += build_utils.get_all_files(notice_dir)
+ # Copy notice of static targets to notice_root_dir
+ copy_static_library_notices(args, depfiles)
+
+ zipfiles = glob.glob('{}/**/*.zip'.format(notice_dir), recursive=True)
+
+ txt_files = glob.glob('{}/**/*.txt'.format(notice_dir), recursive=True)
+ txt_files += glob.glob('{}/**/*.txt.?'.format(notice_dir), recursive=True)
+
+ outputs = [args.output_notice_txt, args.output_notice_gz]
+ if args.notice_module_info:
+ outputs.append(args.notice_module_info)
+ build_utils.call_and_write_depfile_if_stale(
+ lambda: do_merge_notice(args, zipfiles, txt_files),
+ args,
+ depfile_deps=depfiles,
+ input_paths=depfiles,
+ input_strings=[args.notice_title, args.target_cpu],
+ output_paths=outputs)
+
+
+def do_merge_notice(args, zipfiles, txt_files):
+ notice_dir = args.notice_root_dir
+ notice_txt = args.output_notice_txt
+ notice_gz = args.output_notice_gz
+ notice_title = args.notice_title
+
+ if not notice_txt.endswith('.txt'):
+ raise Exception(
+ 'Error: input variable output_notice_txt must end with .txt')
+ if not notice_gz.endswith('.xml.gz'):
+ raise Exception(
+ 'Error: input variable output_notice_gz must end with .xml.gz')
+
+ notice_xml = notice_gz.replace('.gz', '')
+
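+ # Group identical notice texts by content hash so that each unique
+ # license text is written only once, together with all files it covers.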
+ files_with_same_hash = defaultdict(list)
+ for file in zipfiles:
+ txt_files.append(handle_zipfile_notices(file))
+
+ for file in txt_files:
+ if os.stat(file).st_size == 0:
+ continue
+ file_hash = compute_hash(file)
+ files_with_same_hash[file_hash].append(file)
+
+ file_sets = [
+ sorted(files_with_same_hash[hash])
+ for hash in sorted(files_with_same_hash.keys())
+ ]
+
+ if file_sets is not None:
+ generate_txt_notice_files(file_sets, notice_dir, notice_txt,
+ notice_title)
+
+ if files_with_same_hash is not None:
+ generate_xml_notice_files(files_with_same_hash, notice_dir, notice_xml)
+ compress_file_to_gz(notice_xml, args.output_notice_gz)
+
+ if args.notice_module_info:
+ module_install_info_list = []
+ module_install_info = {}
+ module_install_info['type'] = 'notice'
+ module_install_info['source'] = args.output_notice_txt
+ module_install_info['install_enable'] = True
+ module_install_info['dest'] = [
+ os.path.join(args.notice_install_dir,
+ os.path.basename(args.output_notice_txt))
+ ]
+ module_install_info_list.append(module_install_info)
+ write_json_file(args.notice_module_info, module_install_info_list)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/dsoftbus/build/ohos/notice/merge_notice_files.pydeps b/dsoftbus/build/ohos/notice/merge_notice_files.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..16de75c9bef611bfe988084a28c163ba6cecf082
--- /dev/null
+++ b/dsoftbus/build/ohos/notice/merge_notice_files.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/notice --output build/ohos/notice/merge_notice_files.pydeps build/ohos/notice/merge_notice_files.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+merge_notice_files.py
diff --git a/dsoftbus/build/ohos/notice/notice.gni b/dsoftbus/build/ohos/notice/notice.gni
new file mode 100755
index 0000000000000000000000000000000000000000..db496848bbe44ecd554de86930d514834e9fe96a
--- /dev/null
+++ b/dsoftbus/build/ohos/notice/notice.gni
@@ -0,0 +1,108 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("//build/ohos/build_var.gni")
+import("//build/ohos_var.gni")
+import("${build_configs_path}/platforms_list.gni")
+
+declare_args() {
+ sdk_notice_dir = "$root_build_dir/NOTICE_FILES/sdk"
+ sdk_notice_archive_dir = "$root_build_dir/NOTICE_FILES/sdk_archives"
+ ndk_notice_dir = "$root_build_dir/NOTICE_FILES/ndk"
+ static_libraries_notice_dir = "$root_build_dir/NOTICE_FILES/static"
+}
+
+declare_args() {
+ ndk_notice_txt = "$root_build_dir/NOTICE_FILES/ndk-final-notice/NOTICE.txt"
+ ndk_notice_gz = "$root_build_dir/NOTICE_FILES/ndk-final-notice/NOTICE.xml.gz"
+ sdk_notice_txt = "$root_build_dir/NOTICE_FILES/sdk-final-notice/NOTICE.txt"
+ sdk_notice_gz = "$root_build_dir/NOTICE_FILES/sdk-final-notice/NOTICE.xml.gz"
+}
+
+# Generate the notice file for a module.
+# This is a private template, invoked by other build templates rather than
+# used directly in module build files.
+#
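+# A minimal sketch of an invocation (names are hypothetical):
+#
+# collect_notice("example__notice") {
+#   module_name = "example"
+#   module_type = "shared_library"
+#   module_source_dir = get_label_info(":example", "dir")
+# }
+#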
+template("collect_notice") {
+ assert(defined(invoker.module_source_dir), "module_source_dir is required.")
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "module_name",
+ "module_source_dir",
+ "deps",
+ "license_file",
+ "testonly",
+ "module_type",
+ "outputs",
+
+ # Some license files are generated during gn gen.
+ # Such notices should not be used as sources.
+ "license_as_sources",
+ ])
+ script = rebase_path("//build/ohos/notice/collect_module_notice_file.py")
+ depfile = "${target_gen_dir}/$target_name.d"
+
+ if (!defined(outputs)) {
+ outputs = []
+ if (defined(module_type) &&
+ (module_type == "static_library" || module_type == "source_set")) {
+ _current_toolchain = get_label_info(current_toolchain, "name")
+
+ # Although static libraries and source sets are not installed, their
+ # notice files still need to be collected.
+ # We may collect a few more notice files than needed.
+ outputs += [ "${static_libraries_notice_dir}/$_current_toolchain/$module_name.a.txt" ]
+ } else {
+ if (defined(module_type) && module_type == "java_library" &&
+ defined(license_file) &&
+ get_path_info(license_file, "extension") == "zip") {
+ outputs = [ "$target_out_dir/$module_name.notice.zip" ]
+ } else {
+ outputs += [ "$target_out_dir/$module_name.notice.txt" ]
+ }
+ }
+ }
+
+ args = [
+ "--module-source-dir",
+ rebase_path(module_source_dir, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+
+ # Use the default license for modules whose license couldn't be found.
+ "--default-license",
+ rebase_path("//build/ohos/notice/license", root_build_dir),
+ ]
+ foreach(o, outputs) {
+ args += [
+ "--output",
+ rebase_path(o, root_build_dir),
+ ]
+ }
+
+ if (defined(license_file)) {
+ _license_as_sources = true
+ if (defined(license_as_sources)) {
+ _license_as_sources = license_as_sources
+ }
+ if (_license_as_sources) {
+ inputs = [ license_file ]
+ }
+ args += [
+ "--license-file",
+ rebase_path(license_file, root_build_dir),
+ ]
+ }
+ }
+}
diff --git a/dsoftbus/build/ohos/ohos_kits.gni b/dsoftbus/build/ohos/ohos_kits.gni
new file mode 100755
index 0000000000000000000000000000000000000000..3b29994047b1142d3d7b335c90547541c431f646
--- /dev/null
+++ b/dsoftbus/build/ohos/ohos_kits.gni
@@ -0,0 +1,206 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("//build/ohos/kits/kits_check.gni")
+import("//build/ohos_var.gni")
+
+# Defines ohos_inner_kits
+#
+# example:
+#
+# subsystem_sdk("examples_sdk") {
+# sdk_libs = [
+# {
+# type = "so"
+# name = "interfaces/innerkits/{module_name}:{module_name}"
+# header = {
+# header_files = [
+# "calculator.h",
+# "abc/a.h",
+# "abc/xyz/b.h"
+# ]
+# header_base = "interfaces/innerkits/{module_name}/include/"
+# }
+# },
+# {
+# type = "jar"
+# name = "interfaces/innerkits/{module_name}:{module_name}"
+# header = {
+# header_files = []
+# }
+# },
+# ]
+# subsystem_name = "subsystem_examples"
+# }
+
+template("_ohos_subsystem_sdk") {
+ assert(defined(invoker.sdk_libs), "sdk_libs is required.")
+ assert(invoker.variant != "")
+ assert(invoker.origin_name != "")
+
+ part_name = "common"
+ if (defined(invoker.part_name)) {
+ part_name = invoker.part_name
+ }
+ if (defined(invoker.subsystem_name)) {
+ subsystem_name = invoker.subsystem_name
+ } else {
+ subsystem_name = part_name
+ }
+
+ sdk_manifest = []
+ _deps = []
+ _libs_deps = []
+ foreach(sdk_lib, invoker.sdk_libs) {
+ lib_label = get_label_info(sdk_lib.name, "label_with_toolchain")
+ _libs_deps += [ lib_label ]
+
+ lib_type = "so"
+ if (defined(sdk_lib.type)) {
+ lib_type = sdk_lib.type
+ }
+
+ module_out_dir = get_label_info(lib_label, "target_out_dir")
+ module_name = get_label_info(lib_label, "name")
+ module_info_file = "${module_out_dir}/${module_name}_module_info.json"
+
+ lib_header_files = []
+ lib_header_base = "interfaces/innerkits/${module_name}/include/"
+
+ if (defined(sdk_lib.headers)) {
+ if (defined(sdk_lib.header_base)) {
+ lib_header_base = sdk_lib.header_base
+ }
+ foreach(file, sdk_lib.headers) {
+ lib_header_files += [ string_replace(file, lib_header_base, "") ]
+ }
+ } else if (defined(sdk_lib.header)) {
+ header = {
+ }
+ header = sdk_lib.header
+ if (defined(header.header_base)) {
+ lib_header_base = header.header_base
+ }
+ lib_header_files = header.header_files
+ }
+
+ lib_prebuilt_enable = false
+ lib_prebuilt_source = ""
+ if (defined(sdk_lib.prebuilt_source)) {
+ lib_prebuilt_enable = true
+ lib_prebuilt_source = sdk_lib.prebuilt_source
+ }
+
+ sdk_manifest += [
+ {
+ label = lib_label
+ name = module_name
+ type = lib_type
+ header_files = lib_header_files
+ header_base = rebase_path(lib_header_base, "$root_build_dir")
+ source_info = rebase_path(module_info_file, "$root_build_dir")
+ prebuilt_enable = lib_prebuilt_enable
+ prebuilt_source = lib_prebuilt_source
+ subsystem_name = subsystem_name
+ part_name = part_name
+ origin_name = invoker.origin_name
+ current_toolchain = current_toolchain
+ current_toolchain_dir = rebase_path(root_out_dir, root_build_dir)
+ },
+ ]
+ }
+ _deps += _libs_deps
+
+ subsystem_sdk_desc_file = "${target_out_dir}/${subsystem_name}_sdk_desc.json"
+ write_file(subsystem_sdk_desc_file, sdk_manifest, "json")
+
+ if (check_innersdk_interface && !gen_innersdk_interface_signature) {
+ kits_check_remove("${part_name}_innerkit_remove_check_so") {
+ subsystem_name = part_name
+ if (invoker.variant != "phone") {
+ subsystem_name = invoker.origin_name
+ }
+ sign_file_root_dir = "//interface/innersdk/native"
+ sdk_libs_name = []
+ foreach(_sdk_lib, sdk_manifest) {
+ if (_sdk_lib.type == "so") {
+ sdk_libs_name += [ _sdk_lib.name ]
+ }
+ }
+ output_file = "${target_gen_dir}/innerkit_remove_check_so"
+ sdk_type = "so"
+ }
+ _deps += [ ":${part_name}_innerkit_remove_check_so" ]
+ }
+
+ # gen sdk install modules
+ sdk_install_modules_file =
+ "${target_gen_dir}/${subsystem_name}_sdk_install_modules.json"
+ generated_file("${subsystem_name}_sdk_info") {
+ outputs = [ sdk_install_modules_file ]
+ data_keys = [ "install_modules" ]
+ output_conversion = "json"
+ deps = _libs_deps
+ }
+
+ sdk_out_dir = "${root_build_dir}/${innersdk_build_out_dir}/"
+ sdk_build_file = "${sdk_out_dir}/${subsystem_name}/BUILD.gn"
+ sdk_info_file = "${sdk_out_dir}/${subsystem_name}/sdk_info.json"
+
+ action_with_pydeps(target_name) {
+ deps = _deps
+ deps += [ ":${subsystem_name}_sdk_info" ]
+ script = "//build/scripts/gen_sdk_build_file.py"
+ sources = [ subsystem_sdk_desc_file ]
+ outputs = [
+ sdk_build_file,
+ sdk_info_file,
+ ]
+ args = [
+ "--input-file",
+ rebase_path(subsystem_sdk_desc_file, root_build_dir),
+ "--sdk-out-dir",
+ rebase_path("${sdk_out_dir}/${subsystem_name}", root_build_dir),
+ "--output-build-file",
+ rebase_path(sdk_build_file, root_build_dir),
+ "--sdk-info-file",
+ rebase_path(sdk_info_file, root_build_dir),
+ ]
+
+ if (check_innersdk_interface && !gen_innersdk_interface_signature) {
+ innersdk_interface_checkfile_dir = "//interface/innersdk/native"
+ args += [
+ "--signature-file-check-dir",
+ rebase_path(innersdk_interface_checkfile_dir, root_build_dir),
+ ]
+ } else if (gen_innersdk_interface_signature) {
+ args += [
+ "--generate-sig",
+ "--signature-file-gen-dir",
+ rebase_path(
+ "${root_build_dir}/innersdk_interface/native/${subsystem_name}",
+ root_build_dir),
+ ]
+ }
+ }
+}
+
+template("ohos_inner_kits") {
+ _ohos_subsystem_sdk(target_name) {
+ part_name = invoker.part_name
+ sdk_libs = invoker.sdk_libs
+ variant = invoker.variant
+ origin_name = invoker.origin_name
+ }
+}
diff --git a/dsoftbus/build/ohos/ohos_part.gni b/dsoftbus/build/ohos/ohos_part.gni
new file mode 100755
index 0000000000000000000000000000000000000000..4a8a5624290a273a87db879179ca9b1ad30e96ad
--- /dev/null
+++ b/dsoftbus/build/ohos/ohos_part.gni
@@ -0,0 +1,118 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("//build/ohos/ebpf.gni")
+import("//build/ohos_var.gni")
+
+template("ohos_part") {
+ assert(defined(invoker.subsystem_name), "subsystem_name is required.")
+ assert(defined(invoker.module_list), "module_list is required.")
+
+ part_label = get_label_info(":${target_name}", "label_with_toolchain")
+ _deps = []
+ foreach(module_label, invoker.module_list) {
+ _deps += [ get_label_info(module_label, "label_with_toolchain") ]
+ }
+
+ # add sdk dep
+ parts_targets_info_file =
+ "${root_build_dir}/build_configs/required_parts_targets_list.json"
+
+ parts_targets_info = read_file(parts_targets_info_file, "json")
+ foreach(part_info, parts_targets_info) {
+ if (part_info.part == part_label) {
+ if (defined(part_info.inner_kits)) {
+ _deps += [ part_info.inner_kits ]
+ }
+ }
+ }
+
+ part_name = target_name
+ if (defined(invoker.origin_name)) {
+ origin_part_name = invoker.origin_name
+ } else {
+ origin_part_name = part_name
+ }
+ if (defined(invoker.variant)) {
+ variant_name = invoker.variant
+ } else {
+ variant_name = "phone"
+ }
+
+ part_modules_info_file = "${target_gen_dir}/${part_name}_modules.json"
+ generated_file("${part_name}_info") {
+ outputs = [ part_modules_info_file ]
+ data_keys = [ "install_modules" ]
+ output_conversion = "json"
+
+ part_install_info = {
+ part_label = part_label
+ part_name = part_name
+ origin_part_name = origin_part_name
+ variant_name = variant_name
+ subsystem_name = invoker.subsystem_name
+ part_info_file =
+ rebase_path(get_label_info(part_label, "target_out_dir"),
+ root_build_dir) + "/${part_name}_install_modules.json"
+ toolchain_label = get_label_info(part_label, "toolchain")
+ build_out_dir = rebase_path(root_out_dir, root_build_dir)
+ }
+ metadata = {
+ part_installed_info = [ part_install_info ]
+ }
+ deps = _deps
+ }
+
+ # ebpf
+ if (ebpf_enable) {
+ if (defined(invoker.ebpf_testcase)) {
+ collect_ebpf_testcase("${part_name}_ebpf_testcase") {
+ ebpf_testcase = invoker.ebpf_testcase
+ }
+ _deps += [ ":${part_name}_ebpf_testcase" ]
+ }
+ }
+
+ part_install_modules_file =
+ "${target_out_dir}/${part_name}_install_modules.json"
+ part_dep_modules_file = "${target_out_dir}/${part_name}_dep_modules.json"
+ part_sdk_modules_info_file =
+ "${target_gen_dir}/${part_name}_sdk_install_modules.json"
+
+ action_with_pydeps(target_name) {
+ deps = [ ":${part_name}_info" ]
+ script = "//build/ohos/generate_part_info.py"
+ sources = [ part_modules_info_file ]
+ outputs = [
+ part_install_modules_file,
+ part_dep_modules_file,
+ ]
+ args = [
+ "--part-name",
+ part_name,
+ "--origin-part-name",
+ origin_part_name,
+ "--input-file",
+ rebase_path(part_modules_info_file, root_build_dir),
+ "--sdk-modules-info-file",
+ rebase_path(part_sdk_modules_info_file, root_build_dir),
+ "--output-install-file",
+ rebase_path(part_install_modules_file, root_build_dir),
+ "--output-deps-file",
+ rebase_path(part_dep_modules_file, root_build_dir),
+ "--current-toolchain",
+ "${current_toolchain}",
+ ]
+ }
+}
diff --git a/dsoftbus/build/ohos/ohos_test.gni b/dsoftbus/build/ohos/ohos_test.gni
new file mode 100755
index 0000000000000000000000000000000000000000..453896e7c2715abd71ea706671384d350e52f21e
--- /dev/null
+++ b/dsoftbus/build/ohos/ohos_test.gni
@@ -0,0 +1,60 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("//build/ohos/build_var.gni")
+import("//build/ohos_var.gni")
+
+# Defines ohos_part_test
+#
+# example:
+# ohos_part_test("examples_test") {
+#   testonly = true
+#   deps = []
+#   part_name = "examples"
+#   subsystem_name = "examples"
+#   test_packages = [
+#     "calculator/test:unittest",
+#     "detector/test:unittest",
+#     "summator/test:unittest",
+#   ]
+# }
+template("_ohos_part_test") {
+ assert(defined(invoker.part_name), "part_name is required.")
+ assert(defined(invoker.test_packages), "test_packages is required.")
+
+ part_name = invoker.part_name
+ subsystem_name = invoker.subsystem_name
+ assert(subsystem_name != "")
+ assert(part_name != "")
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps += invoker.deps
+ }
+
+ group(target_name) {
+ if (defined(invoker.testonly)) {
+ testonly = invoker.testonly
+ }
+ deps = _deps
+ deps += invoker.test_packages
+ }
+}
+
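+# Public wrapper around _ohos_part_test. Callers must set testonly, deps,
+# part_name, subsystem_name and test_packages, since all of them are
+# forwarded verbatim from the invoker.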
+template("ohos_part_test") {
+ _ohos_part_test(target_name) {
+ testonly = invoker.testonly
+ deps = invoker.deps
+ part_name = invoker.part_name
+ subsystem_name = invoker.subsystem_name
+ test_packages = invoker.test_packages
+ }
+}
diff --git a/dsoftbus/build/ohos/packages/BUILD.gn b/dsoftbus/build/ohos/packages/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..bdd5dab88bcc42c0e5135218b1cbc9a95b1b81c4
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/BUILD.gn
@@ -0,0 +1,477 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("//build/ohos/sa_profile/sa_profile.gni")
+
+import("//build/ohos.gni")
+import("//build/ohos/build_var.gni")
+
+# import target_platform_list
+import("${build_configs_path}/platforms_list.gni")
+group("make_packages") {
+ deps = []
+ foreach(_platform, target_platform_list) {
+ deps += [
+ ":${_platform}_install_modules",
+ ":${_platform}_parts_list",
+ ":gen_required_modules_${_platform}",
+ ]
+ }
+}
+
+all_parts_info_file = "${root_build_dir}/all_parts_info.json"
+all_platforms_parts =
+ "${root_build_dir}/build_configs/target_platforms_parts.json"
+
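+# Per-platform packaging pipeline (defined in the foreach below): generate
+# the platform's parts list, install sa profiles, collect and merge NOTICE
+# files, then install all modules into the system image staging directory.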
+foreach(_platform, target_platform_list) {
+ current_platform = _platform
+ current_platform_dir = "${product_output_dir}/$current_platform"
+
+ _system_install_info_file =
+ "${current_platform_dir}/system_install_parts.json"
+
+ action_with_pydeps("${current_platform}_parts_list") {
+ script = "//build/ohos/packages/parts_install_info.py"
+ deps = [ "//build/ohos/common:merge_all_parts" ]
+ inputs = [ all_parts_info_file ]
+ outputs = [ _system_install_info_file ]
+ depfile = "$target_gen_dir/$target_name.d"
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--all-parts-info-file",
+ rebase_path(all_parts_info_file, root_build_dir),
+ "--platforms-parts-file",
+ rebase_path(all_platforms_parts, root_build_dir),
+ "--system-install-info-file",
+ rebase_path(_system_install_info_file, root_build_dir),
+ "--current-platform",
+ current_platform,
+ ]
+ }
+
+ post_process_modules_list = []
+
+ # sa profile install
+ sa_install_modules_info_file =
+ "${current_platform_dir}/sa_profile/sa_install_info.json"
+ _merged_sa_profile_zipfile =
+ "${current_platform_dir}/sa_profile/merged_sa_profile.zip"
+ _merged_sa_profile_dir = "${current_platform_dir}/sa_profile/merged_sa"
+ ohos_sa_install_info("${current_platform}_sa_profile_install_info") {
+ deps = [ ":${current_platform}_parts_list" ]
+ _outputs = get_target_outputs(":${current_platform}_parts_list")
+ system_install_info_file = _outputs[0]
+ sa_install_info_file = sa_install_modules_info_file
+ merged_sa_profile_zipfile = _merged_sa_profile_zipfile
+ merged_sa_profile_dir = _merged_sa_profile_dir
+ }
+
+  # all post-process modules info
+ post_process_modules_list += [ sa_install_modules_info_file ]
+
+ _notice_root_dir = "${current_platform_dir}/NOTICE_FILES"
+ _collected_notice_zipfile = "${current_platform_dir}/system_notice_files.zip"
+
+ action_with_pydeps("collect_notice_files__${_platform}") {
+ deps = [ ":${_platform}_parts_list" ]
+ script = "//build/ohos/notice/collect_system_notice_files.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ sources = [ _system_install_info_file ]
+ outputs = [ _collected_notice_zipfile ]
+ args = [
+ "--install-info-file",
+ rebase_path(_system_install_info_file, root_build_dir),
+ "--output-file",
+ rebase_path(_collected_notice_zipfile, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--notice-root-dir",
+ rebase_path(_notice_root_dir, root_build_dir),
+ ]
+ }
+
+ _merged_notice_txt = "${current_platform_dir}/NOTICE.txt"
+ _notice_module_info_file = "${current_platform_dir}/NOTICE_module_info.json"
+ action_with_pydeps("merge_system_notice_file_${current_platform}") {
+ deps = [ ":collect_notice_files__${current_platform}" ]
+ script = "//build/ohos/notice/merge_notice_files.py"
+ depfile = "$target_gen_dir/$target_name.d"
+
+ _merged_notice_gz = "${current_platform_dir}/NOTICE.xml.gz"
+ inputs = [ _collected_notice_zipfile ]
+ outputs = [
+ _merged_notice_txt,
+ _merged_notice_gz,
+ _notice_module_info_file,
+ ]
+ args = [
+ "--image-name",
+ "system",
+ "--notice-root-dir",
+ rebase_path(_notice_root_dir, root_build_dir),
+ "--output-notice-txt",
+ rebase_path(_merged_notice_txt, root_out_dir),
+ "--output-notice-gz",
+ rebase_path(_merged_notice_gz, root_out_dir),
+ "--notice-title",
+ "Notices for files contained in the system filesystem image in this directory:",
+ "--static-library-notice-dir",
+ rebase_path(static_libraries_notice_dir, root_build_dir),
+ "--target-cpu",
+ target_cpu,
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--collected-notice-zipfile",
+ rebase_path(_collected_notice_zipfile, root_build_dir),
+ "--notice-module-info",
+ rebase_path(_notice_module_info_file, root_build_dir),
+ "--notice-install-dir",
+ "system/etc",
+ ]
+ }
+ post_process_modules_list += [ _notice_module_info_file ]
+
+ action("verify_notice_file_${current_platform}") {
+ deps = [ ":merge_system_notice_file_${current_platform}" ]
+
+ script = "//build/core/build_scripts/verify_notice.sh"
+ _verify_result = "${current_platform_dir}/notice_verify_result.out"
+
+ inputs = [ _merged_notice_txt ]
+ outputs = [ _verify_result ]
+
+ args = [
+ rebase_path(_merged_notice_txt, root_build_dir),
+ rebase_path(_verify_result, root_build_dir),
+ rebase_path(current_platform_dir, root_build_dir),
+ ]
+ }
+
+ system_install_modules = "${current_platform_dir}/system_install_modules.json"
+ system_module_info_list = "${current_platform_dir}/system_module_info.json"
+ system_modules_list = "${current_platform_dir}/system_modules_list.txt"
+ _system_image_zipfile = "${current_platform_dir}/system.zip"
+
+ action_with_pydeps("${_platform}_install_modules") {
+ script = "//build/ohos/packages/modules_install.py"
+ public_deps = [ ":${current_platform}_parts_list" ]
+ depfile = "$target_gen_dir/$target_name.d"
+ deps = [
+ ":${current_platform}_sa_profile_install_info",
+ ":merge_system_notice_file_${current_platform}",
+ ":verify_notice_file_${current_platform}",
+ ]
+
+ sources = [
+ _merged_sa_profile_zipfile,
+ _system_install_info_file,
+ ]
+
+ outputs = [
+ system_install_modules,
+ system_module_info_list,
+ system_modules_list,
+ _system_image_zipfile,
+ ]
+
+ args = [
+ "--system-install-info-file",
+ rebase_path(_system_install_info_file, root_build_dir),
+ "--install-modules-info-file",
+ rebase_path(system_install_modules, root_build_dir),
+ "--modules-info-file",
+ rebase_path(system_module_info_list, root_build_dir),
+ "--modules-list-file",
+ rebase_path(system_modules_list, root_build_dir),
+ "--platform-installed-path",
+ rebase_path(current_platform_dir, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--sa-profile-extract-dir",
+ rebase_path(_merged_sa_profile_dir, root_build_dir),
+ "--merged-sa-profile",
+ rebase_path(_merged_sa_profile_zipfile, root_build_dir),
+ "--system-dir",
+ rebase_path("$current_platform_dir/system", root_build_dir),
+ "--system-image-zipfile",
+ rebase_path(_system_image_zipfile, root_build_dir),
+ ]
+
+ _additional_system_files = []
+ foreach(tuple, _additional_system_files) {
+ args += [
+ "--additional-system-files",
+ rebase_path(tuple[0], root_build_dir) + ":" + tuple[1],
+ ]
+ }
+
+ if (post_process_modules_list != []) {
+ sources += post_process_modules_list
+ args += [ "--post-process-modules-info-files" ]
+ args += rebase_path(post_process_modules_list, root_build_dir)
+ }
+ }
+}
+
+# required_install_module_list.json
+foreach(_platform, target_platform_list) {
+ current_platform_dir = "${product_output_dir}/$_platform"
+ _system_install_info_file =
+ "${current_platform_dir}/system_install_parts.json"
+ required_install_modules_file =
+ "${current_platform_dir}/required_install_module_list.json"
+ action_with_pydeps("gen_required_modules_${_platform}") {
+ deps = [ ":${_platform}_parts_list" ]
+ script = "//build/ohos/packages/gen_required_modules_list.py"
+ inputs = [ _system_install_info_file ]
+ outputs = [ required_install_modules_file ]
+ depfile = "$target_gen_dir/$target_name.d"
+ args = [
+ "--system-installed-info-file",
+ rebase_path(_system_install_info_file, root_out_dir),
+ "--required-install-modules-file",
+ rebase_path(required_install_modules_file, root_out_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+ }
+}
+
+if (is_asan) {
+ foreach(_platform, target_platform_list) {
+ action("restore_first_stage_artifact_${_platform}") {
+ current_platform = _platform
+ current_platform_dir = "${product_output_dir}/$current_platform"
+ deps = [ ":${_platform}_install_modules" ]
+ print("restore_first_stage_artifact_${_platform}")
+
+ if (current_cpu == "arm64") {
+ module_type = "lib64"
+ } else if (current_cpu == "arm") {
+ module_type = "lib"
+ }
+
+ sources_dir = [
+ "${root_build_dir}/backup/packages/${_platform}/system/$module_type",
+ "${root_build_dir}/backup/packages/${_platform}/system/bin",
+ ]
+
+ restore_dir = "${current_platform_dir}/${system_base_dir}/restore"
+
+ outputs = [ restore_dir ]
+
+ script = "//build/ohos/packages/backup_restore_artifact.py"
+ args = [ "--source-dir" ]
+ foreach(source_dir, sources_dir) {
+ args += [ rebase_path(source_dir, root_out_dir) ]
+ }
+
+ args += [
+ "--output-dir",
+ rebase_path(restore_dir, root_out_dir),
+ ]
+ }
+
+ action("restore_first_stage_symbols_${_platform}") {
+ current_platform = _platform
+ current_platform_dir = "${product_output_dir}/$current_platform"
+ deps = [ ":package_libs_symbols_${_platform}" ]
+ print("restore_first_stage_symbols_${_platform}")
+
+ sources_dir = [
+ "${root_build_dir}/backup/packages/${_platform}/exe.unstripped",
+ "${root_build_dir}/backup/packages/${_platform}/lib.unstripped",
+ ]
+
+ restore_dir = "${current_platform_dir}/restore_symbols"
+
+ outputs = [ restore_dir ]
+
+ script = "//build/ohos/packages/backup_restore_artifact.py"
+ args = [ "--source-dir" ]
+ foreach(source_dir, sources_dir) {
+ args += [ rebase_path(source_dir, root_out_dir) ]
+ }
+
+ args += [
+ "--output-dir",
+ rebase_path(restore_dir, root_out_dir),
+ ]
+ }
+ }
+}
+
+foreach(_platform, target_platform_list) {
+ current_platform_dir = "$product_output_dir/$_platform"
+ _system_install_info_file =
+ "${current_platform_dir}/system_install_parts.json"
+
+ action_with_pydeps("package_libs_symbols_${_platform}") {
+ deps = [ ":${_platform}_parts_list" ]
+ script = "//build/ohos/packages/resources_collect.py"
+ inputs = [ _system_install_info_file ]
+ output_file = "$target_out_dir/$target_name.out"
+ outputs = [ output_file ]
+
+ args = [
+ "--collect-type",
+ "libs_symbols",
+ "--system-install-info-file",
+ rebase_path(_system_install_info_file, root_build_dir),
+ "--resources-dir-list",
+ "lib.unstripped",
+ "exe.unstripped",
+ "--package-output-base-dir",
+ rebase_path("$current_platform_dir", root_build_dir),
+ "--output-file",
+ rebase_path(output_file, root_build_dir),
+ ]
+ }
+}
+
+group("package_libs_symbols") {
+ deps = []
+ foreach(_platform, target_platform_list) {
+ deps += [ ":package_libs_symbols_${_platform}" ]
+ if (is_asan) {
+ deps += [ ":restore_first_stage_symbols_${_platform}" ]
+ }
+ }
+}
+
+foreach(_platform, target_platform_list) {
+ current_platform_dir = "${product_output_dir}/$_platform"
+ _system_install_info_file =
+ "${current_platform_dir}/system_install_parts.json"
+
+ action_with_pydeps("package_testcase_mlf_${_platform}") {
+ testonly = true
+ deps = [
+ ":${_platform}_parts_list",
+ "//build/core/gn:build_all_test_pkg",
+ ]
+ script = "//build/ohos/packages/resources_collect.py"
+ inputs = [ _system_install_info_file ]
+ output_file = "$target_out_dir/$target_name.out"
+ outputs = [ output_file ]
+ args = [
+ "--collect-type",
+ "module_list_files",
+ "--system-install-info-file",
+ rebase_path(_system_install_info_file, root_build_dir),
+ "--resources-dir-list",
+ "module_list_files",
+ "--package-output-base-dir",
+ rebase_path("$current_platform_dir", root_build_dir),
+ "--output-file",
+ rebase_path(output_file, root_build_dir),
+ ]
+ }
+}
+
+group("package_testcase_mlf") {
+ testonly = true
+ deps = []
+ foreach(_platform, target_platform_list) {
+ deps += [ ":package_testcase_mlf_${_platform}" ]
+ }
+}
+
+foreach(_platform, target_platform_list) {
+ current_platform_dir = "${product_output_dir}/$_platform"
+
+ _system_install_info_file =
+ "${current_platform_dir}/system_install_parts.json"
+
+ action_with_pydeps("package_testcase_${_platform}") {
+ testonly = true
+ deps = [
+ ":${_platform}_parts_list",
+ "//build/core/gn:build_all_test_pkg",
+ ]
+ script = "//build/ohos/packages/resources_collect.py"
+ inputs = [ _system_install_info_file ]
+ output_file = "$target_out_dir/$target_name.out"
+ outputs = [ output_file ]
+
+ test_type_list = [
+ "unittest",
+ "moduletest",
+ "systemtest",
+ "performance",
+ "security",
+ "reliability",
+ "distributedtest",
+ "fuzztest",
+ ]
+
+ resources_dir_list = []
+ foreach(test_type, test_type_list) {
+ resources_dir_list += [ "tests/" + test_type ]
+ }
+
+ args = [
+ "--collect-type",
+ "testcase",
+ "--system-install-info-file",
+ rebase_path(_system_install_info_file, root_build_dir),
+ "--package-output-base-dir",
+ rebase_path("$current_platform_dir/tests", root_build_dir),
+ "--output-file",
+ rebase_path(output_file, root_build_dir),
+ "--resources-dir-list",
+ ]
+ args += resources_dir_list
+ }
+}
+
+foreach(_platform, target_platform_list) {
+ current_platform_dir = "${product_output_dir}/$_platform"
+ required_files = []
+ required_files += [
+ "${build_configs_path}/target_platforms_parts.json",
+ "${build_configs_path}/parts_info/parts_info.json",
+ "${build_configs_path}/infos_for_testfwk.json",
+ ]
+ action_with_pydeps("copy_testfwk_required_files_${_platform}") {
+ testonly = true
+ deps = [ ":package_testcase_${_platform}" ]
+ script = "//build/ohos/copy_files.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ output_file = "$target_out_dir/$target_name.out"
+ outputs = [ output_file ]
+ args = [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--copy-output-dir",
+ rebase_path("$current_platform_dir/tests", root_build_dir),
+ "--outfile",
+ rebase_path(output_file, root_build_dir),
+ "--source-files",
+ ]
+ args += rebase_path(required_files, root_build_dir)
+ }
+}
+
+group("package_testcase") {
+ testonly = true
+ deps = []
+ foreach(_platform, target_platform_list) {
+ deps += [
+ ":copy_testfwk_required_files_${_platform}",
+ ":package_testcase_${_platform}",
+ ]
+ }
+}
diff --git a/dsoftbus/build/ohos/packages/backup_restore_artifact.py b/dsoftbus/build/ohos/packages/backup_restore_artifact.py
new file mode 100755
index 0000000000000000000000000000000000000000..41aa20d9ee6075f5878d6faae9fc349e551eafd5
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/backup_restore_artifact.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+import shutil
+
+
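+# Moves build artifacts between their original location and a backup tree.
+# The mode is selected by the basename of --output-dir:
+#   backup          - move each source dir under the backup directory
+#   restore         - move each source dir back into the product output
+#   restore_symbols - restore unstripped symbols, parking the current asan
+#                     symbols under an 'asan' subdirectory of the backup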
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--source-dir', nargs='+', required=True,
+                        help='directories to back up or restore')
+    parser.add_argument('--output-dir', required=True,
+                        help='destination directory; its basename selects the mode')
+    args = parser.parse_args()
+
+    str_out_basename = os.path.basename(args.output_dir)
+    if str_out_basename == "backup" and not os.path.exists(args.output_dir):
+        os.mkdir(args.output_dir)
+
+ # move source dir
+ for source in args.source_dir:
+ print(source)
+ if os.path.exists(source):
+ output_path = None
+ if str_out_basename == "backup":
+ output_path = os.path.join(
+ args.output_dir, os.path.basename(source))
+ if str_out_basename == "restore":
+ output_path = args.output_dir.replace(
+ str_out_basename, os.path.basename(source))
+ if str_out_basename == "restore_symbols":
+ asan_symbols_path = args.output_dir.replace(
+ str_out_basename, os.path.basename(source))
+ asan_symbols_backup_path = source.replace(os.path.basename(
+ source), os.path.join(os.path.basename(source), "asan"))
+ shutil.move(asan_symbols_path, asan_symbols_backup_path)
+ output_path = asan_symbols_path
+ if output_path is not None:
+ print("Move:{},To:{}".format(source, output_path))
+ shutil.move(source, output_path)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/packages/gen_required_modules_list.py b/dsoftbus/build/ohos/packages/gen_required_modules_list.py
new file mode 100755
index 0000000000000000000000000000000000000000..5ee2d6aafa14d5d7178365b64520c918066b3e57
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/gen_required_modules_list.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+
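+# Builds a map from part name to the module-info dicts of its installable
+# modules; the output shape is, illustratively:
+#   {"<part_name>": [{"source": "...", "dest": [...], ...}, ...]}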
+def read_install_info(system_installed_info_file, depfiles):
+ install_module_dict = {}
+ parts_list = _read_file_content(system_installed_info_file)
+ if parts_list is None:
+ raise Exception(
+ "read file '{}' failed.".format(system_installed_info_file))
+ for _part_info in parts_list:
+ is_source = _part_info.get('is_source')
+ if is_source is False:
+ continue
+ part_name = _part_info.get('part_name')
+ part_info_file = _part_info.get('part_info_file')
+ depfiles.append(part_info_file)
+ module_info_file_list = _read_file_and_get_content(
+ part_info_file, 'module_info_file')
+ install_module_list = []
+ for module_info_file in module_info_file_list:
+ module_info = _read_file_content(module_info_file)
+ depfiles.append(module_info_file)
+ install_enable = module_info.get('install_enable')
+ if not install_enable:
+ continue
+ install_module_list.append(module_info)
+ install_module_dict[part_name] = install_module_list
+ return install_module_dict
+
+
+def _read_file_content(input_file):
+ if not os.path.exists(input_file):
+ raise Exception("file '{}' does not exist.".format(input_file))
+ data = read_json_file(input_file)
+ if data is None:
+ raise Exception("read file '{}' failed.".format(input_file))
+ return data
+
+
+def _read_file_and_get_content(input_file, get_attr_name=None):
+    data = _read_file_content(input_file)
+    result = []
+    for info in data:
+        result.append(info.get(get_attr_name))
+    return result
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--system-installed-info-file', required=True)
+ parser.add_argument('--required-install-modules-file', required=True)
+ parser.add_argument('--depfile', required=True)
+ args = parser.parse_args()
+
+ depfiles = [args.system_installed_info_file]
+    install_module_dict = read_install_info(args.system_installed_info_file,
+                                            depfiles)
+    write_json_file(args.required_install_modules_file, install_module_dict)
+ build_utils.write_depfile(args.depfile, args.required_install_modules_file,
+ depfiles)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/packages/gen_required_modules_list.pydeps b/dsoftbus/build/ohos/packages/gen_required_modules_list.pydeps
new file mode 100755
index 0000000000000000000000000000000000000000..2ea095fd911cc0cef8f2ece2a7054332d377951b
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/gen_required_modules_list.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/packages --output build/ohos/packages/gen_required_modules_list.pydeps build/ohos/packages/gen_required_modules_list.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+gen_required_modules_list.py
diff --git a/dsoftbus/build/ohos/packages/modules_install.py b/dsoftbus/build/ohos/packages/modules_install.py
new file mode 100755
index 0000000000000000000000000000000000000000..8181e47726669f29a3bd0cee05477af99dcd6014
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/modules_install.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+import shutil
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file, write_json_file, write_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+
+def _get_modules_info(system_install_info, depfiles):
+ modules_info_dict = {}
+ for subsystem_info in system_install_info:
+ part_name = subsystem_info.get('part_name')
+ part_info_file = subsystem_info.get('part_info_file')
+
+ # read subsystem module info
+ part_install_info = read_json_file(part_info_file)
+ if part_install_info is None:
+ raise Exception(
+ "read part '{}' installed modules info failed.".format(
+ part_name))
+ depfiles.append(part_info_file)
+
+ for info in part_install_info:
+ module_def = info.get('module_def')
+ module_info_file = info.get('module_info_file')
+ depfiles.append(module_info_file)
+ if module_def not in modules_info_dict:
+ modules_info_dict[module_def] = module_info_file
+ return modules_info_dict
+
+
+def _get_post_process_modules_info(post_process_modules_info_files, depfiles):
+ modules_info_list = []
+ for _modules_info_file in post_process_modules_info_files:
+ _modules_info = read_json_file(_modules_info_file)
+ if _modules_info is None:
+ raise Exception("read _modules_info_file '{}' failed.".format(
+ _modules_info_file))
+ modules_info_list.extend(_modules_info)
+ depfiles.append(_modules_info_file)
+ return modules_info_list
+
+
+def copy_modules(system_install_info, install_modules_info_file,
+ modules_info_file, module_list_file,
+ post_process_modules_info_files, platform_installed_path,
+ additional_system_files, depfiles):
+ output_result = []
+ dest_list = []
+ symlink_dest = []
+
+ modules_info_dict = _get_modules_info(system_install_info, depfiles)
+ for value in modules_info_dict.values():
+ module_info = read_json_file(value)
+ if not module_info:
+ raise Exception(
+ "read module install info file '{}' error.".format(value))
+ install = module_info.get('install_enable')
+ if not install:
+ continue
+ output_result.append(module_info)
+
+ # get post process modules info
+ post_process_modules = _get_post_process_modules_info(
+ post_process_modules_info_files, depfiles)
+ for _module_info in post_process_modules:
+ install = _module_info.get('install_enable')
+ if not install:
+ continue
+ output_result.append(_module_info)
+
+ for source, system_path in additional_system_files:
+ shutil.copy(source, os.path.join(platform_installed_path, system_path))
+
+ # copy modules
+ for module_info in output_result:
+ if module_info.get('type') == 'none':
+ continue
+ # copy module lib
+ source = module_info.get('source')
+ dests = module_info.get('dest')
+ # check source
+ if not os.path.exists(source):
+ raise Exception("source '{}' doesn't exist.".format(source))
+ depfiles.append(source)
+ for dest in dests:
+ if dest.startswith('/'):
+ dest = dest[1:]
+ dest_list.append(dest)
+ # dest_dir_prefix
+ dest_dir = os.path.join(platform_installed_path,
+ os.path.dirname(dest))
+ if not os.path.exists(dest_dir):
+ os.makedirs(dest_dir)
+ shutil.copy(source, os.path.join(platform_installed_path, dest))
+
+ # add symlink
+ if 'symlink' in module_info:
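+        # for every install destination, create a sibling symlink for each
+        # name listed in 'symlink', pointing at the installed file's basename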
+ symlink_dest = module_info.get('symlink')
+ for dest in dests:
+ symlink_src_file = os.path.basename(dest)
+ for name in symlink_dest:
+ symlink_dest_dir = os.path.dirname(dest)
+ symlink_dest_file = os.path.join(platform_installed_path,
+ symlink_dest_dir, name)
+ if not os.path.exists(symlink_dest_file):
+ os.symlink(symlink_src_file, symlink_dest_file)
+
+ # write install module info to file
+ write_json_file(install_modules_info_file, modules_info_dict)
+
+ # write all module info
+ write_json_file(modules_info_file, output_result)
+
+ # output module list to file
+ write_file(module_list_file, '\n'.join(dest_list))
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--system-install-info-file', required=True)
+ parser.add_argument('--install-modules-info-file', required=True)
+ parser.add_argument('--modules-info-file', required=True)
+ parser.add_argument('--modules-list-file', required=True)
+ parser.add_argument('--platform-installed-path', required=True)
+ parser.add_argument('--system-dir', required=True)
+ parser.add_argument('--sa-profile-extract-dir', required=True)
+ parser.add_argument('--merged-sa-profile', required=True)
+ parser.add_argument('--depfile', required=True)
+ parser.add_argument('--system-image-zipfile', required=True)
+    parser.add_argument(
+        '--additional-system-files',
+        action='append',
+        help="additional system files, for files that have no module info; "
+        "use with caution")
+ parser.add_argument('--post-process-modules-info-files',
+ nargs='*',
+ default=[])
+ args = parser.parse_args()
+
+ additional_system_files = []
+    for item in args.additional_system_files or []:
+        filepath, system_path = item.split(':')
+        additional_system_files.append((filepath, system_path))
+
+ depfiles = []
+ build_utils.extract_all(args.merged_sa_profile,
+ args.sa_profile_extract_dir,
+ no_clobber=False)
+ sa_files = build_utils.get_all_files(args.sa_profile_extract_dir)
+
+ system_install_info = read_json_file(args.system_install_info_file)
+ if system_install_info is None:
+ raise Exception("read file '{}' failed.".format(
+ args.system_install_info_file))
+
+ system_install_base_dir = args.system_dir
+ if os.path.exists(system_install_base_dir):
+ shutil.rmtree(system_install_base_dir)
+ print('remove system dir...')
+ os.makedirs(system_install_base_dir)
+
+ vendor_install_base_dir = os.path.join(args.platform_installed_path,
+ 'vendor')
+ if os.path.exists(vendor_install_base_dir):
+ shutil.rmtree(vendor_install_base_dir)
+ print('remove vendor dir...')
+
+ updater_install_base_dir = os.path.join(args.platform_installed_path,
+ 'updater')
+ if os.path.exists(updater_install_base_dir):
+ shutil.rmtree(updater_install_base_dir)
+ print('remove updater dir...')
+
+ print('copy modules...')
+ copy_modules(system_install_info, args.install_modules_info_file,
+ args.modules_info_file, args.modules_list_file,
+ args.post_process_modules_info_files,
+ args.platform_installed_path, additional_system_files,
+ depfiles)
+
+ if os.path.exists(args.system_image_zipfile):
+ os.unlink(args.system_image_zipfile)
+ build_utils.zip_dir(args.system_image_zipfile, args.system_dir)
+    # drop extracted sa profile files from the depfile: they are produced by
+    # this action itself
+    depfiles = [item for item in depfiles if item not in sa_files]
+ build_utils.write_depfile(args.depfile, args.install_modules_info_file,
+ depfiles)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/packages/modules_install.pydeps b/dsoftbus/build/ohos/packages/modules_install.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..644441d2fe95d330a347b27bd36fa341b5dab9f6
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/modules_install.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/packages --output build/ohos/packages/modules_install.pydeps build/ohos/packages/modules_install.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+modules_install.py
diff --git a/dsoftbus/build/ohos/packages/parts_install_info.py b/dsoftbus/build/ohos/packages/parts_install_info.py
new file mode 100755
index 0000000000000000000000000000000000000000..a8119d558757d09c0e06c7d5d7f9be00681d13d7
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/parts_install_info.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+
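+# Selects, from all_parts_info.json, the parts configured for the current
+# platform (per target_platforms_parts.json) and writes them to the
+# platform's system_install_parts.json.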
+def get_platform_parts(current_platform, platforms_parts_file):
+ all_platforms_parts = read_json_file(platforms_parts_file)
+ if all_platforms_parts is None:
+ raise Exception("read file '{}' failed.".format(all_platforms_parts))
+ platform_parts = all_platforms_parts.get(current_platform)
+ return platform_parts
+
+
+def platform_parts(all_parts_info_file, current_platform_parts):
+ _parts_list = []
+ all_parts_info = read_json_file(all_parts_info_file)
+ if all_parts_info is None:
+ raise Exception("read file '{}' failed.".format(all_parts_info_file))
+
+ for part_name in current_platform_parts.keys():
+ if part_name not in all_parts_info:
+ raise Exception(
+ "required part '{}' doesn't exist.".format(part_name))
+ _parts_list.append(all_parts_info.get(part_name))
+ return _parts_list
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--all-parts-info-file', required=True)
+ parser.add_argument('--platforms-parts-file', required=True)
+ parser.add_argument('--system-install-info-file', required=True)
+ parser.add_argument('--current-platform', required=True)
+ parser.add_argument('--depfile', required=False)
+ args = parser.parse_args()
+
+ current_platform_parts = get_platform_parts(args.current_platform,
+ args.platforms_parts_file)
+
+ _parts_list = platform_parts(args.all_parts_info_file,
+ current_platform_parts)
+
+ platform_out_dir = os.path.dirname(args.system_install_info_file)
+ if not os.path.exists(platform_out_dir):
+ os.makedirs(platform_out_dir, exist_ok=True)
+ write_json_file(args.system_install_info_file, _parts_list)
+
+ if args.depfile:
+ _dep_files = []
+ _dep_files.append(args.all_parts_info_file)
+ _dep_files.append(args.platforms_parts_file)
+ build_utils.write_depfile(args.depfile,
+ args.system_install_info_file,
+ _dep_files,
+ add_pydeps=False)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/packages/parts_install_info.pydeps b/dsoftbus/build/ohos/packages/parts_install_info.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..9a3fc9cdc59c5d82037f86cc0da664edfd8dca46
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/parts_install_info.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/packages --output build/ohos/packages/parts_install_info.pydeps build/ohos/packages/parts_install_info.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+parts_install_info.py
diff --git a/dsoftbus/build/ohos/packages/platforms_install_info.py b/dsoftbus/build/ohos/packages/platforms_install_info.py
new file mode 100755
index 0000000000000000000000000000000000000000..c3422da8ce25b135bdbd3889d7a69a308d48c8f1
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/platforms_install_info.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+
+
+def subsystem_list(all_subsystem_info_file, curr_subsystem_name_list_file):
+ curr_subsystem_list = []
+ all_subsystem_info = read_json_file(all_subsystem_info_file)
+ if all_subsystem_info is None:
+ raise Exception(
+ "read file '{}' failed.".format(all_subsystem_info_file))
+ curr_subsystem_name_list = read_json_file(curr_subsystem_name_list_file)
+ if curr_subsystem_name_list is None:
+ raise Exception(
+ "read file '{}' failed.".format(curr_subsystem_name_list_file))
+    for _subsystem_name in curr_subsystem_name_list:
+        if _subsystem_name in all_subsystem_info:
+            curr_subsystem_list.append(
+                all_subsystem_info.get(_subsystem_name))
+ return curr_subsystem_list
+
+
+def main():
+ parser = argparse.ArgumentParser()
+    parser.add_argument('--all-subsystem-info-file', required=True)
+    parser.add_argument('--target-platform-subsystem', required=True)
+    parser.add_argument('--system-install-info-file', required=True)
+    args = parser.parse_args()
+
+    curr_subsystem_list = subsystem_list(args.all_subsystem_info_file,
+                                         args.target_platform_subsystem)
+    platform_out_dir = os.path.dirname(args.system_install_info_file)
+    if not os.path.exists(platform_out_dir):
+        os.makedirs(platform_out_dir, exist_ok=True)
+    write_json_file(args.system_install_info_file, curr_subsystem_list)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/packages/resources_collect.py b/dsoftbus/build/ohos/packages/resources_collect.py
new file mode 100755
index 0000000000000000000000000000000000000000..e7a0ef53db7e0153fbcc4669e28c7c126d7eb197
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/resources_collect.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+import shutil
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+
+
+def get_src_parts(system_install_info_file):
+ parts_info_list = read_json_file(system_install_info_file)
+ if parts_info_list is None:
+ raise Exception(
+ "read file '{}' failed.".format(system_install_info_file))
+ src_parts_info = {}
+ for part_info in parts_info_list:
+ if 'is_source' not in part_info:
+ raise Exception("read part info error, missing key is_source.")
+ if part_info.get('is_source') is True:
+ part_name = part_info.get('part_name')
+ src_parts_info[part_name] = part_info
+ return src_parts_info
+
+
+def _collect_testcase(resources_dir, parts_info, output_base_dir):
+ for part_name, part_info in parts_info.items():
+ origin_part_name = part_info.get('origin_part_name')
+ if part_info.get('variant_name') != 'phone':
+ _toolchain = part_info.get('toolchain_label')
+ _toolchain_name = _toolchain.split(':')[1]
+ _variant_dir = os.path.join(_toolchain_name, resources_dir,
+ origin_part_name)
+ else:
+ _variant_dir = os.path.join(resources_dir, part_name)
+ if not os.path.exists(_variant_dir):
+ continue
+
+ _src_dir_name = os.path.basename(resources_dir)
+ package_output_dir = os.path.join(output_base_dir, _src_dir_name)
+ dest_dir = os.path.join(package_output_dir, origin_part_name)
+ if os.path.exists(dest_dir):
+ shutil.rmtree(dest_dir)
+ shutil.copytree(_variant_dir, dest_dir)
+
+
+def collect_testcase(resources_dir_list, parts_info, output_base_dir):
+ for resources_dir in resources_dir_list:
+ _collect_testcase(resources_dir, parts_info, output_base_dir)
+
+
+def _collect_test_mlf(resources_dir, parts_info, output_base_dir):
+ for part_name, part_info in parts_info.items():
+ origin_part_name = part_info.get('origin_part_name')
+ if part_info.get('variant_name') != 'phone':
+ _toolchain = part_info.get('toolchain_label')
+ _toolchain_name = _toolchain.split(':')[1]
+ _variant_dir = os.path.join(_toolchain_name, resources_dir,
+ origin_part_name)
+ else:
+ _variant_dir = os.path.join(resources_dir, part_name)
+ if not os.path.exists(_variant_dir):
+ continue
+
+ _src_dir_name = os.path.basename(resources_dir)
+ package_output_dir = os.path.join(output_base_dir, _src_dir_name)
+ dest_dir = os.path.join(package_output_dir, origin_part_name)
+ if os.path.exists(dest_dir):
+ shutil.rmtree(dest_dir)
+ shutil.copytree(_variant_dir, dest_dir)
+
+
+def collect_test_mlf(resources_dir_list, parts_info, output_base_dir):
+ for resources_dir in resources_dir_list:
+ _collect_test_mlf(resources_dir, parts_info, output_base_dir)
+
+
+def _collect_libs_symbols(resources_dir, parts_info, package_output_base_dir):
+ for part_name, part_info in parts_info.items():
+ origin_part_name = part_info.get('origin_part_name')
+ if part_info.get('variant_name') != 'phone':
+ _toolchain = part_info.get('toolchain_label')
+ _toolchain_name = _toolchain.split(':')[1]
+ _variant_dir = os.path.join(_toolchain_name, resources_dir,
+ _toolchain_name,
+ part_info.get('subsystem_name'),
+ origin_part_name)
+ else:
+ _variant_dir = os.path.join(resources_dir,
+ part_info.get('subsystem_name'),
+ part_name)
+ if not os.path.exists(_variant_dir):
+ continue
+
+ _src_dir_name = os.path.basename(resources_dir)
+ package_output_dir = os.path.join(package_output_base_dir,
+ _src_dir_name)
+ dest_dir = os.path.join(package_output_dir, origin_part_name)
+ if os.path.exists(dest_dir):
+ shutil.rmtree(dest_dir)
+ shutil.copytree(_variant_dir, dest_dir)
+
+
+def collect_libs_symbols(resources_dir_list, parts_info, output_base_dir):
+ for resources_dir in resources_dir_list:
+ _collect_libs_symbols(resources_dir, parts_info, output_base_dir)
+
+
+def _collect_libs_java(resources_dir, parts_info, output_base_dir):
+ parts_libs_java_path = {}
+ for part_name, part_info in parts_info.items():
+ origin_part_name = part_info.get('origin_part_name')
+ if part_info.get('variant_name') != 'phone':
+ _toolchain = part_info.get('toolchain_label')
+ _toolchain_name = _toolchain.split(':')[1]
+ _variant_dir = os.path.join(_toolchain_name, resources_dir,
+ part_info.get('subsystem_name'),
+ origin_part_name)
+ else:
+ _variant_dir = os.path.join(resources_dir,
+ part_info.get('subsystem_name'),
+ part_name)
+ if not os.path.exists(_variant_dir):
+ continue
+
+ parts_libs_java_path[origin_part_name] = _variant_dir
+ _libs_java_path_info_file = os.path.join(output_base_dir,
+ 'libs_java_info.json')
+ write_json_file(_libs_java_path_info_file, parts_libs_java_path)
+
+
+def collect_libs_java(resources_dir_list, parts_info, output_base_dir):
+ for resources_dir in resources_dir_list:
+ _collect_libs_java(resources_dir, parts_info, output_base_dir)
+
+
+def _collect_kits_modules(resources_dir, parts_info, output_base_dir):
+ parts_modules_path = {}
+ for part_name, part_info in parts_info.items():
+ part_modules_dir = os.path.join(resources_dir, part_name)
+ if not os.path.exists(part_modules_dir):
+ continue
+ origin_part_name = part_info.get('origin_part_name')
+ parts_modules_path[origin_part_name] = part_modules_dir
+ _modules_path_info_file = os.path.join(output_base_dir,
+ 'kits_modules_info.json')
+ write_json_file(_modules_path_info_file, parts_modules_path)
+
+
+def collect_kits_modules(resources_dir_list, parts_info, output_base_dir):
+ for resources_dir in resources_dir_list:
+ _collect_kits_modules(resources_dir, parts_info, output_base_dir)
+
+
+def collect_innerkits_modules(resources_dir_list, parts_info, output_base_dir):
+ for resources_dir in resources_dir_list:
+ _collect_kits_modules(resources_dir, parts_info, output_base_dir)
+
+
+def collect_resources(collect_type, resources_dir_list, parts_info,
+ output_base_dir):
+ if collect_type == 'libs_symbols':
+ collect_libs_symbols(resources_dir_list, parts_info, output_base_dir)
+ elif collect_type == 'module_list_files':
+ collect_test_mlf(resources_dir_list, parts_info, output_base_dir)
+ elif collect_type == 'testcase':
+ collect_testcase(resources_dir_list, parts_info, output_base_dir)
+ elif collect_type == 'kits_modules':
+ collect_kits_modules(resources_dir_list, parts_info, output_base_dir)
+ elif collect_type == 'innerkits_modules':
+ collect_innerkits_modules(resources_dir_list, parts_info,
+ output_base_dir)
+ elif collect_type == 'libs_java':
+ collect_libs_java(resources_dir_list, parts_info, output_base_dir)
+ else:
+        raise Exception(
+            "unsupported collect type '{}'.".format(collect_type))
+
+
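+# Example invocation (paths are illustrative, not actual build paths):
+#   resources_collect.py --collect-type libs_symbols \
+#       --system-install-info-file system_install_parts.json \
+#       --resources-dir-list lib.unstripped exe.unstripped \
+#       --package-output-base-dir packages/phone \
+#       --output-file collect.out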
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--collect-type', required=True)
+ parser.add_argument('--system-install-info-file', required=True)
+ parser.add_argument('--resources-dir-list', nargs='+', default=[])
+ parser.add_argument('--package-output-base-dir', required=True)
+ parser.add_argument('--output-file', required=True)
+ args = parser.parse_args()
+
+ parts_info = get_src_parts(args.system_install_info_file)
+
+ collect_resources(args.collect_type, args.resources_dir_list, parts_info,
+ args.package_output_base_dir)
+    write_json_file(args.output_file, {})
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/packages/resources_collect.pydeps b/dsoftbus/build/ohos/packages/resources_collect.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..13fd0ac49fdaf38ae75255a6764ae04753dc962c
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/resources_collect.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/packages --output build/ohos/packages/resources_collect.pydeps build/ohos/packages/resources_collect.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/file_utils.py
+resources_collect.py
diff --git a/dsoftbus/build/ohos/packages/system_gzip_package.py b/dsoftbus/build/ohos/packages/system_gzip_package.py
new file mode 100755
index 0000000000000000000000000000000000000000..def054d5862701f26e4452609c8a8cd84dde10cc
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/system_gzip_package.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+import os
+import shutil
+import tarfile
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util import build_utils # noqa: E402
+
+
+def merge_image_files(src_image_path, dest_image_path):
+ if not os.path.exists(src_image_path):
+ raise Exception(
+ "source image path {} does not exist.".format(src_image_path))
+
+ for root, dirs, files in os.walk(src_image_path):
+ for dir_name in dirs:
+ src_dir = os.path.join(root, dir_name)
+ dir_relpath = os.path.relpath(src_dir, src_image_path)
+ dest_dir_path = os.path.join(dest_image_path, dir_relpath)
+ if not os.path.exists(dest_dir_path):
+ os.makedirs(dest_dir_path)
+
+ for file_name in files:
+ src_file_path = os.path.join(root, file_name)
+ file_relpath = os.path.relpath(src_file_path, src_image_path)
+ dest_file_path = os.path.join(dest_image_path, file_relpath)
+ if not os.path.exists(dest_file_path):
+ if not os.path.exists(os.path.dirname(dest_file_path)):
+ os.makedirs(os.path.dirname(dest_file_path))
+ shutil.copy2(src_file_path, dest_file_path)
+
+
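+# The image directory itself is archived under the fixed name 'system',
+# while each additional file keeps its own basename at the archive root.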
+def compress_image_files(package_dir, output_file, additional_files):
+ # Compress the image folder
+ files = [package_dir] + additional_files
+ with tarfile.open(output_file, "w:gz") as tar:
+ for f in files:
+ if os.path.exists(f):
+ try:
+ # additional files will be packed outside of system dir.
+ if f in additional_files:
+ tar.add(f, arcname=os.path.basename(f))
+ else:
+ tar.add(f, arcname='system')
+                except OSError as ioerr:
+                    print("Compress file failed. Error code: {}".format(
+                        ioerr.errno))
+                except Exception:
+                    print("Unexpected error")
+                    raise
+
+
+def main(argv):
+ argv = build_utils.expand_file_args(argv)
+ parser = argparse.ArgumentParser()
+ build_utils.add_depfile_option(parser)
+ parser.add_argument('--image-dir', help='', required=True)
+ parser.add_argument("--system-image-zipfile", required=True)
+ parser.add_argument('--output-file', help='', required=True)
+    parser.add_argument('--additional-files', help='', action='append',
+                        default=[])
+    args = parser.parse_args(argv[1:])
+
+    depfiles = [args.system_image_zipfile] + args.additional_files
+ with build_utils.temp_dir() as img_dir:
+ build_utils.extract_all(args.system_image_zipfile,
+ img_dir,
+ no_clobber=True)
+ build_utils.call_and_write_depfile_if_stale(
+ lambda: compress_image_files(img_dir, args.output_file, args.
+ additional_files),
+ args,
+ depfile_deps=depfiles,
+ input_paths=depfiles,
+ output_paths=([args.output_file]),
+ force=False,
+ add_pydeps=False)
+
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/dsoftbus/build/ohos/packages/system_gzip_package.pydeps b/dsoftbus/build/ohos/packages/system_gzip_package.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..c84a1f41c9d620df5fb085dd0ade1cdfbf5c6cc3
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/system_gzip_package.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/packages --output build/ohos/packages/system_gzip_package.pydeps build/ohos/packages/system_gzip_package.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+system_gzip_package.py
diff --git a/dsoftbus/build/ohos/packages/system_notice_info.py b/dsoftbus/build/ohos/packages/system_notice_info.py
new file mode 100755
index 0000000000000000000000000000000000000000..6466e6232b0544676baf549cc0df457e17ea3f8d
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/system_notice_info.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+import shutil
+import re
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+
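+# part_label has the form '//subsystem/part:name(toolchain)'; splitting on
+# ':', '(' or ')' leaves the part name at index 1,
+# e.g. '//foo:bar(//toolchain:t)' -> 'bar'.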
+def _get_label_name(label):
+ return re.split('[:|()]', label)[1]
+
+
+def _read_notice_info_file(subsystem_notice_info_dir, platform_base_dir):
+ subsystem_notice_info_list = []
+ subsystem_notice_files = []
+ _file_list = os.listdir(subsystem_notice_info_dir)
+ for _info_file in _file_list:
+ _info_file_path = os.path.join(subsystem_notice_info_dir, _info_file)
+ if not os.path.isfile(_info_file_path):
+ continue
+ if not _info_file.endswith('_notice_file'):
+ continue
+ subsystem_notice_info_list.append(_info_file_path)
+ _nf_src_list = read_json_file(_info_file_path)
+ for _nf_src in _nf_src_list:
+ _dest = os.path.join(platform_base_dir, os.path.dirname(_nf_src))
+ if not os.path.exists(_dest):
+ os.makedirs(_dest, exist_ok=True)
+ shutil.copy2(_nf_src, _dest)
+ _dest_file = os.path.relpath(
+ os.path.join(_dest, os.path.basename(_nf_src)))
+ subsystem_notice_files.append(_dest_file)
+ return subsystem_notice_info_list, subsystem_notice_files
+
+
+def get_notice_info(system_install_info_file, notice_info_dir,
+ platform_base_dir):
+ system_install_info = read_json_file(system_install_info_file)
+ if system_install_info is None:
+ raise Exception(
+ "read file '{}' failed.".format(system_install_info_file))
+
+ src_subsystem_list = []
+ for subsystem_info in system_install_info:
+ if subsystem_info.get('is_source') is False:
+ continue
+ part_label = subsystem_info.get('part_label')
+ part_name = _get_label_name(part_label)
+ src_subsystem_list.append(part_name)
+
+ notice_info_file_list = []
+ system_notice_files = []
+ for subsystem_name in src_subsystem_list:
+ subsystem_notice_info_dir = os.path.join(notice_info_dir,
+ subsystem_name)
+ if not os.path.exists(subsystem_notice_info_dir):
+ continue
+ subsystem_notice_info_list, subsystem_notice_files = _read_notice_info_file(
+ subsystem_notice_info_dir, platform_base_dir)
+ notice_info_file_list.extend(subsystem_notice_info_list)
+ system_notice_files.extend(subsystem_notice_files)
+ return notice_info_file_list, system_notice_files
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--system-install-info-file', required=True)
+ parser.add_argument('--notice-file-info-dir', required=True)
+ parser.add_argument('--platform-base-dir', required=True)
+ parser.add_argument('--output-file', required=True)
+ parser.add_argument('--depfile', required=False)
+ args = parser.parse_args()
+
+ _dep_files = [args.system_install_info_file]
+ notice_info_file_list, system_notice_files = get_notice_info(
+ args.system_install_info_file, args.notice_file_info_dir,
+ args.platform_base_dir)
+
+ _dep_files.extend(notice_info_file_list)
+ write_json_file(args.output_file, system_notice_files)
+
+ if args.depfile:
+        _dep_files.sort()
+ build_utils.write_depfile(args.depfile,
+ args.output_file,
+ _dep_files,
+ add_pydeps=False)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/packages/system_notice_info.pydeps b/dsoftbus/build/ohos/packages/system_notice_info.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..a461632ffb8b6d5b862c597b9d80db4b4935ac4c
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/system_notice_info.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/packages --output build/ohos/packages/system_notice_info.pydeps build/ohos/packages/system_notice_info.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+system_notice_info.py
diff --git a/dsoftbus/build/ohos/packages/system_z_package.py b/dsoftbus/build/ohos/packages/system_z_package.py
new file mode 100755
index 0000000000000000000000000000000000000000..832a00441ad1aa369032c7fd6fc771527c2dcfcf
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/system_z_package.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+import os
+import shutil
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util import build_utils # noqa: E402
+from scripts.util import file_utils # noqa: E402
+
+
+def archive_files(system_image_zipfile, additional_files, output_dir,
+ output_file):
+ if not os.path.exists(output_dir):
+ os.makedirs(output_dir, exist_ok=True)
+
+    build_utils.extract_all(system_image_zipfile,
+                            os.path.join(output_dir, 'system'),
+                            no_clobber=False)
+ for _file in additional_files:
+ _dest = os.path.join(output_dir, os.path.basename(_file))
+ if os.path.isdir(_file):
+ if os.path.exists(_dest):
+ shutil.rmtree(_dest)
+ shutil.copytree(_file, _dest)
+ else:
+ shutil.copy2(_file, _dest)
+
+ files_list = []
+ for root, _, files in os.walk(output_dir):
+ for _file in files:
+ files_list.append(os.path.join(root, _file))
+ file_utils.write_file(output_file, '\n'.join(files_list))
+
+
+def main(argv):
+ argv = build_utils.expand_file_args(argv)
+ parser = argparse.ArgumentParser()
+ build_utils.add_depfile_option(parser)
+ parser.add_argument("--system-image-zipfile", required=True)
+ parser.add_argument('--output-dir', required=True)
+ parser.add_argument('--output-file', required=True)
+ parser.add_argument('--additional-files', action='append')
+ args = parser.parse_args(argv[1:])
+
+    if args.additional_files is None:
+        args.additional_files = []
+    depfiles = [args.system_image_zipfile] + args.additional_files
+ build_utils.call_and_write_depfile_if_stale(
+ lambda: archive_files(args.system_image_zipfile, args.additional_files,
+ args.output_dir, args.output_file),
+ args,
+ depfile_deps=depfiles,
+ input_paths=depfiles,
+ output_paths=([args.output_file, args.output_dir]),
+ force=False,
+ add_pydeps=False)
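+    # call_and_write_depfile_if_stale() re-runs archive_files() only when the
+    # recorded inputs, outputs or command line changed; with force=False an
+    # unchanged build skips the archive step entirely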
+
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/dsoftbus/build/ohos/packages/system_z_package.pydeps b/dsoftbus/build/ohos/packages/system_z_package.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..2dcbce76b20beadce8691ec80964f7c037d84cd1
--- /dev/null
+++ b/dsoftbus/build/ohos/packages/system_z_package.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/packages --output build/ohos/packages/system_z_package.pydeps build/ohos/packages/system_z_package.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+system_z_package.py
diff --git a/dsoftbus/build/ohos/sa_profile/BUILD.gn b/dsoftbus/build/ohos/sa_profile/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..f5270cb2ce4ade6cf360a8aaf05306142571bab2
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/BUILD.gn
@@ -0,0 +1,45 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("//build/ohos/build_var.gni")
+
+_sa_profile_info_file = "$root_out_dir/src_sa_infos_tmp.json"
+generated_file("generated_src_sa_profile") {
+ deps = [ "//build/ohos/common:generate_src_installed_info" ]
+
+ outputs = [ _sa_profile_info_file ]
+ data_keys = [ "sa_install_info" ]
+ output_conversion = "json"
+}
+
+action_with_pydeps("src_sa_infos_process") {
+ deps = [ ":generated_src_sa_profile" ]
+ script = "//build/ohos/sa_profile/src_sa_profile_process.py"
+ sa_profile_src_infos_file = "${product_output_dir}/src_sa_infos.json"
+
+ # variants-toolchain-file from //build/ohos/build_var.gni
+ sources = [ _sa_profile_info_file ]
+ outputs = [ sa_profile_src_infos_file ]
+ depfile = "$target_gen_dir/$target_name.d"
+ args = [
+ "--sa-profile-infos-file",
+ rebase_path(_sa_profile_info_file, root_build_dir),
+ "--variants-toolchain-file",
+ rebase_path(variants_toolchain_file, root_build_dir),
+ "--output-file",
+ rebase_path(sa_profile_src_infos_file, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+}
diff --git a/dsoftbus/build/ohos/sa_profile/sa_info_process/__init__.py b/dsoftbus/build/ohos/sa_profile/sa_info_process/__init__.py
new file mode 100755
index 0000000000000000000000000000000000000000..d7b3a083706fb60581a81c2a917e927139f61f7f
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_info_process/__init__.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/dsoftbus/build/ohos/sa_profile/sa_info_process/merge_sa_info.py b/dsoftbus/build/ohos/sa_profile/sa_info_process/merge_sa_info.py
new file mode 100755
index 0000000000000000000000000000000000000000..bc15f8a5206f6ef8c427dbfff269bb1ca858ddca
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_info_process/merge_sa_info.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import xml.etree.ElementTree as ET
+import ntpath
+import shutil
+import tempfile
+import os
+import logging
+import sys
+
+sys.path.append(os.path.dirname(os.path.abspath(__file__)))
+from sort_sa_by_bootphase import SARearrangement
+from sa_info_config_errors import (
+    BadFormatXMLError, CircularDependencyError,
+    CrossProcessCircularDependencyError)
+
+
+class SAInfoMerger(object):
+ INDENT_SPACES = ' ' * 4
+
+ class SAInfoCollector(object):
+ """
+        Class for collecting sa info pieces that share the same process name
+ """
+ def __init__(self, process_name, wdir):
+ self.process_name = process_name
+ self.loadlibs = []
+ self.systemabilities = []
+ self.wdir = wdir
+
+ @property
+ def output_filename(self):
+ basename = self.process_name + '.xml'
+ return os.path.join(self.wdir, basename)
+
+ def add_libpath_info(self, libpath):
+ """
+ A libpath shared by multiple SAs in a process should be added once
+ """
+ if libpath not in self.loadlibs:
+ self.loadlibs.append(libpath)
+
+ def add_systemability_info(self, systemability):
+ self.systemabilities += systemability
+
+ def merge_sa_info(self):
+ """
+            Write all sa info pieces shared by the same process to a new file
+            """
+            DECLARATION = '<?xml version="1.0" encoding="utf-8"?>\n'
+            ROOT_OPEN_TAG = '<info>\n'
+            ROOT_CLOSE_TAG = '</info>'
+ # add declaration and root open tag
+ xml_lines = [DECLARATION, ROOT_OPEN_TAG]
+ # add process
+        process_line = SAInfoMerger.INDENT_SPACES + \
+            '<process>{}</process>\n'
+ xml_lines.append(process_line.format(self.process_name))
+ # add libpath
+            xml_lines.append(SAInfoMerger.INDENT_SPACES + '<loadlibs>\n')
+            xml_lines += list(self.loadlibs)
+            xml_lines.append(SAInfoMerger.INDENT_SPACES + '</loadlibs>\n')
+ # add systemability
+ xml_lines += self.systemabilities
+ # add root close tag
+            xml_lines.append(ROOT_CLOSE_TAG)
+
+ # write file to temporary directory
+ with open(self.output_filename, 'w', encoding='utf-8') as xml_files:
+ for line in xml_lines:
+ xml_files.write(line)
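+
+            # Shape of the merged file for a process "foo" (illustrative):
+            #   <?xml version="1.0" encoding="utf-8"?>
+            #   <info>
+            #       <process>foo</process>
+            #       <loadlibs>
+            #           <libpath>libfoo.z.so</libpath>
+            #       </loadlibs>
+            #       <systemability>...</systemability>
+            #   </info>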
+
+ def __init__(self, is_64bit_arch):
+ self.process_sas_dict = {}
+ self.output_filelist = []
+ self.is_64bit_arch = is_64bit_arch
+
+ def __add_to_output_filelist(self, file):
+ self.output_filelist.append(os.path.join(self.output_dir, file))
+
+ def __parse_xml_file(self, source_file):
+ parser = ET.XMLParser()
+ tree = ET.parse(source_file, parser)
+ root = tree.getroot()
+
+ # check root tag
+ if root.tag == 'profile':
+ FORMAT = 'bad root <{}> tag, new format is expected'
+            # the old <profile> format should not be used anymore
+ raise BadFormatXMLError(FORMAT.format(root.tag), source_file)
+ elif root.tag != 'info':
+            # other profile files whose root tag does not equal 'info'
+            # are just left intact and copied as-is
+ basename = ntpath.basename(source_file)
+ dest_file = os.path.join(self.output_dir, basename)
+ shutil.copyfile(source_file, dest_file)
+ self.__add_to_output_filelist(basename)
+
+ # emit a warning to let user know it if there exists a typo
+ FORMAT = '"{}" is not merged, for it\'s root tag is "{}"'
+ logging.warning(FORMAT.format(source_file, root.tag))
+ return
+
+ FORMAT = 'one and only one {} tag is expected, actually {} is found'
+ # check process tag
+ process_nodes = root.findall('process')
+ process_nodes_count = len(process_nodes)
+ if process_nodes_count != 1:
+            raise BadFormatXMLError(FORMAT.format('<process>',
+                process_nodes_count), source_file)
+ else:
+            # ensure that the value of <process> is valid
+ process_name = process_nodes[0].text
+ if process_name is None or process_name.strip() == '':
+                raise BadFormatXMLError('provide a valid value for <process>',
+                                        source_file)
+ process_name = process_name.strip()
+ if self.process_sas_dict.get(process_name) is None:
+ # create a new collector if a new process tag is found
+ sa_info_collector = self.SAInfoCollector(process_name,
+ self.temp_dir)
+ self.process_sas_dict[process_name] = sa_info_collector
+ self.__add_to_output_filelist(process_name + '.xml')
+ else:
+ sa_info_collector = self.process_sas_dict[process_name]
+
+ # check libpath tag
+ libpath_nodes = root.findall('systemability/libpath')
+ libpath_nodes_count = len(libpath_nodes)
+ if libpath_nodes_count != 1:
+            raise BadFormatXMLError(FORMAT.format('<libpath>',
+                libpath_nodes_count), source_file)
+ else:
+ libpath = libpath_nodes[0].text.strip()
+ libname = ntpath.basename(libpath)
+            # [Temporary scheme] no additional processing for 64-bit arch or
+            # for a libpath without a prefixed directory
+ if not self.is_64bit_arch and libname != libpath:
+ libpath = os.path.join("/system/lib", libname)
+ libpath_nodes[0].text = libpath
+            reconstructed_str = '<libpath>{}</libpath>\n'.format(libpath)
+            # indent the reconstructed line to fit the merged file layout
+            string_repr = self.INDENT_SPACES * 2 + reconstructed_str
+ sa_info_collector.add_libpath_info(string_repr)
+
+ # check systemability tag
+ systemability_nodes = root.findall('systemability')
+ sa_nodes_count = len(systemability_nodes)
+ self.sa_nodes_count = sa_nodes_count
+ if sa_nodes_count != 1:
+            raise BadFormatXMLError(FORMAT.format('<systemability>',
+                sa_nodes_count), source_file)
+ else:
+ byte_repr = ET.tostring(systemability_nodes[0], encoding='utf-8')
+ # fix weird indent problem after converting the node to string
+ string_repr = self.INDENT_SPACES + byte_repr.decode('utf-8')
+            # fix newline problem if the <systemability> has a trailing comment
+ fixed_string_repr = string_repr.rstrip() + '\n'
+ sa_info_collector.add_systemability_info([fixed_string_repr])
+
+ def __merge(self, sa_info_filelist, output_dir):
+ """
+        Iterate process_sas_dict and call its merge_sa_info method to
+ merge systemability info by process name
+ """
+ self.output_dir = output_dir
+
+ # collect systemability info by process
+ for source_file in sa_info_filelist:
+ self.__parse_xml_file(source_file)
+
+ global_ordered_systemability_names = []
+ global_systemability_deps_dict = {}
+ # merge systemability info for each process
+        for process, collector in self.process_sas_dict.items():
+            rearrangement = SARearrangement()
+            # do the merge
+            collector.merge_sa_info()
+            # sort sa by bootphase and dependency
+            merged_file = collector.output_filename
+            dest_file = os.path.join(output_dir, ntpath.basename(merged_file))
+            rearrangement.sort(merged_file, dest_file)
+            # get deps info for later global circular-dependency detection
+            deps_info = rearrangement.get_deps_info()
+            global_ordered_systemability_names += deps_info[0]
+            global_systemability_deps_dict.update(deps_info[1])
+
+ # detect possible cross-process circular dependency
+ try:
+ SARearrangement.detect_invalid_dependency_globally(
+ global_ordered_systemability_names,
+ global_systemability_deps_dict)
+ except CircularDependencyError as e:
+ for file in self.output_filelist:
+ try:
+ os.remove(file)
+ except OSError:
+ pass
+ raise CrossProcessCircularDependencyError(e)
+
+ # finally return an output filelist
+ return self.output_filelist
+
+ def merge(self, sa_info_filelist, output_dir):
+ with tempfile.TemporaryDirectory(dir='./') as temp_dir:
+ self.temp_dir = temp_dir
+ return self.__merge(sa_info_filelist, output_dir)
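+
+# Usage sketch (illustrative file names):
+#   merger = SAInfoMerger(is_64bit_arch=True)
+#   out_files = merger.merge(['1234.xml', '5678.xml'], 'out/profile/')
+# merge() groups the inputs by <process>, writes one merged xml per process
+# into out/profile/ and returns the list of files written.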
diff --git a/dsoftbus/build/ohos/sa_profile/sa_info_process/sa_info_config_errors.py b/dsoftbus/build/ohos/sa_profile/sa_info_process/sa_info_config_errors.py
new file mode 100755
index 0000000000000000000000000000000000000000..319937a2b5d55827de61953cd44755858f4be967
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_info_process/sa_info_config_errors.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+class CircularDependencyError(Exception):
+ pass
+
+
+class CrossProcessCircularDependencyError(Exception):
+ pass
+
+
+class InvertDependencyError(Exception):
+ pass
+
+
+class RunOnCreateDependencyError(Exception):
+ pass
+
+
+class NotSupportedBootphaseError(Exception):
+ pass
+
+
+class SystemAbilityNameConflictError(Exception):
+ pass
+
+
+class BadFormatXMLError(Exception):
+ def __init__(self, message, file_in_process):
+ super().__init__(message, file_in_process)
+ self.message = message
+ self.file_in_process = file_in_process
+
+ def __str__(self):
+ return "In xml file: {}, {}".format(self.file_in_process, self.message)
diff --git a/dsoftbus/build/ohos/sa_profile/sa_info_process/sort_sa_by_bootphase.py b/dsoftbus/build/ohos/sa_profile/sa_info_process/sort_sa_by_bootphase.py
new file mode 100755
index 0000000000000000000000000000000000000000..72df164b69dfb515899b6fea460a07c25ab4f508
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_info_process/sort_sa_by_bootphase.py
@@ -0,0 +1,415 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import xml.etree.ElementTree as ET
+import re
+import sys
+import copy
+from enum import Enum
+
+from sa_info_config_errors import (
+    BadFormatXMLError, CircularDependencyError, InvertDependencyError,
+    NotSupportedBootphaseError, RunOnCreateDependencyError,
+    SystemAbilityNameConflictError)
+
+
+class RearrangementPolicy(object):
+ BOOT_START_PHASE = "BootStartPhase"
+ CORE_START_PHASE = "CoreStartPhase"
+ DEFAULT_START_PHASE = "OthersStartPhase"
+
+ rearrange_category_order = (
+ BOOT_START_PHASE,
+ CORE_START_PHASE,
+ DEFAULT_START_PHASE
+ )
+
+ bootphase_priority_table = {
+ BOOT_START_PHASE: 3,
+ CORE_START_PHASE: 2,
+ DEFAULT_START_PHASE: 1
+ }
+
+ def __init__(self):
+ self.bootphase_categories = {
+ RearrangementPolicy.BOOT_START_PHASE: [],
+ RearrangementPolicy.CORE_START_PHASE: [],
+ RearrangementPolicy.DEFAULT_START_PHASE: []
+ }
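+
+# Example: with SAs {"1001": BootStartPhase, "1002": CoreStartPhase,
+# "1003": OthersStartPhase} (hypothetical ids), the rearranged order is
+# "1001", "1002", "1003": BootStartPhase first, OthersStartPhase last.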
+
+
+class SARearrangement(object):
+ def __init__(self):
+ self.rearranged_systemabilities = []
+ self.ordered_systemability_names = []
+ self.name_node_dict = {}
+ self.systemability_deps_dict = {}
+ self.bootphase_dict = {}
+ self.creation_dict = {}
+ self.policy = RearrangementPolicy()
+ self.sanode_start_idx = 0
+ self.systemability_nodes = []
+ self.xml_lines = []
+ self.xml_lines_range = {}
+ self.sa_nodes_count = 0
+ self.first_sanode_start_lineno = 0
+ self.last_sanode_end_lineno = 0
+
+ def __read_xml_file_into_lines(self, source_file):
+ """
+        Read the xml file into lines, and map each systemability node to
+        its begin-line/end-line range
+ """
+ class Status(Enum):
+ expect_sa_tag_begin = 1
+ expect_name_value = 2
+ expect_sa_tag_end = 3
+
+ def find_systemability_name(line):
+ regexp = r"^[\t\s]*(.*)"
+ matches = re.search(regexp, line)
+ if matches is not None:
+ name = matches.group(1).strip()
+ return name
+ return ""
+
+ with open(source_file, "r", encoding="utf-8") as xml_file:
+ self.xml_lines = xml_file.readlines()
+
+        sa_tag_begin = r"^[\t\s]*<systemability"
+        sa_tag_end = r"^[\t\s]*</systemability>"
+        status = Status.expect_sa_tag_begin
+        systemability_name = ""
+        range_begin = 0
+        for linenum, line in enumerate(self.xml_lines):
+            if status == Status.expect_sa_tag_begin:
+                if re.search(sa_tag_begin, line) is not None:
+                    range_begin = linenum
+                    if self.first_sanode_start_lineno == 0:
+                        self.first_sanode_start_lineno = linenum
+                    status = Status.expect_name_value
+            elif status == Status.expect_name_value:
+                systemability_name = find_systemability_name(line)
+                if systemability_name != "":
+                    status = Status.expect_sa_tag_end
+            elif status == Status.expect_sa_tag_end:
+                if re.search(sa_tag_end, line) is not None:
+                    self.xml_lines_range[systemability_name] = (range_begin,
+                                                                linenum)
+                    self.last_sanode_end_lineno = linenum
+                    status = Status.expect_sa_tag_begin
+
+    def __parse_xml_file(self, source_file):
+        parser = ET.XMLParser()
+        tree = ET.parse(source_file, parser)
+        self.root = tree.getroot()
+        self.systemability_nodes = self.root.findall('systemability')
+        self.sa_nodes_count = len(self.systemability_nodes)
+        # record the index of the first systemability node within root
+        for idx, node in enumerate(list(self.root)):
+            if node.tag == 'systemability':
+                self.sanode_start_idx = idx
+                break
+        if self.sa_nodes_count == 0:
+            # no <systemability> tag found in xml file
+            pass
+
+ def __rearrange_systemability_node_nonstrict(self, dest_file):
+ """
+        Rearranging systemability nodes this way will change the original
+        formatting of the source file
+ """
+ # remove old systemability nodes
+ for systemability_node in self.systemability_nodes:
+ self.root.remove(systemability_node)
+
+ # insert the rearranged systemability nodes
+ for idx in range(len(self.rearranged_systemabilities)):
+ name = self.rearranged_systemabilities[idx]
+ self.root.insert(self.sanode_start_idx + idx,
+ self.name_node_dict[name])
+ tree = ET.ElementTree(self.root)
+ tree.write(dest_file, encoding="utf-8", xml_declaration=True)
+
+ def __rearrange_systemability_node_strict(self, dest_file):
+ """
+        Rearranging systemability nodes this way preserves the original
+        formatting of the source file
+ """
+ if self.first_sanode_start_lineno != 0:
+ rearranged_lines = self.xml_lines[:self.first_sanode_start_lineno]
+ for name in self.rearranged_systemabilities:
+ ranges = self.xml_lines_range[name]
+ rearranged_lines += self.xml_lines[ranges[0]:ranges[1] + 1]
+ rearranged_lines += self.xml_lines[self.last_sanode_end_lineno + 1:]
+ else:
+ rearranged_lines = self.xml_lines
+
+ with open(dest_file, "w", encoding="utf-8") as xml_files:
+ for line in rearranged_lines:
+ xml_files.write(line)
+
+ def __sort_systemability_by_bootphase_priority(self):
+ def inner_category_sort(systemabilities):
+ """
+            Sort dependencies within the same bootphase category, preserving
+            the original order in the source file
+ """
+ systemabilities_ = systemabilities[:]
+ for systemability in systemabilities_:
+ dependencies = self.systemability_deps_dict[systemability]
+ for dependency in dependencies:
+ # should update idx_self each iteration
+ idx_self = systemabilities.index(systemability)
+ try:
+ idx_dep = systemabilities.index(dependency)
+ # if the dependency is behind, then exchange the order
+ if idx_self < idx_dep:
+ tmp = systemabilities[idx_dep]
+ systemabilities[idx_dep] = systemabilities[idx_self]
+ systemabilities[idx_self] = tmp
+ except ValueError:
+ pass # ignore different category of dependencies
+
+ # put the systemability nodes into different categories
+ for systemability_name in self.ordered_systemability_names:
+ bootphase = self.bootphase_dict[systemability_name]
+ salist = self.policy.bootphase_categories[bootphase]
+ salist.append(systemability_name)
+
+ # sort the systemability nodes according to RearrangementPolicy
+ for category in RearrangementPolicy.rearrange_category_order:
+ salist = self.policy.bootphase_categories[category]
+ inner_category_sort(salist)
+ self.rearranged_systemabilities += salist
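+
+        # e.g. within one category, if "1001" depends on "1002" but "1002"
+        # appears later in the source order, inner_category_sort() swaps the
+        # two so that "1002" ends up before "1001" (hypothetical ids)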
+
+ def __detect_invert_dependency(self, systemability, depend):
+ """
+        Detect inverted dependencies: a systemability with a high boot
+        priority depends on one with a lower priority, e.g. a systemability
+        named 'sa1' with BootStartPhase priority depends on a systemability
+        named 'sa2' with CoreStartPhase
+ """
+ FORMAT = ("Bad dependency found: the {} with high priority " +
+ "depends on a {} with low one")
+ self_idx = self.bootphase_dict[systemability]
+ # The depend may be in other process
+ dep_idx = self.bootphase_dict.get(depend)
+ if dep_idx is None:
+ return
+ self_priority = RearrangementPolicy.bootphase_priority_table[self_idx]
+ depend_priority = RearrangementPolicy.bootphase_priority_table[dep_idx]
+ if self_priority > depend_priority:
+ raise InvertDependencyError(
+ FORMAT.format(systemability, depend))
+
+ def __detect_creation_dependency(self, systemability, depend):
+ """
+        Detect dependency related to the <run-on-create> configuration:
+        if a sa with <run-on-create> set to 'true' depends on a sa with it
+        set to 'false', a RunOnCreateDependencyError will be thrown
+ """
+ FORMAT = ("Bad dependency found: the {} with run-on-create " +
+ "depends on a {} with run-on-demand")
+ self_creation = self.creation_dict.get(systemability)
+ dep_creation = self.creation_dict.get(depend)
+ if self_creation == "true" and dep_creation == "false":
+ raise RunOnCreateDependencyError(FORMAT.format(systemability, depend))
+
+ @classmethod
+ def __detect_invalid_dependency(cls, dependency_checkers,
+ ordered_sa_names, sa_deps_dict):
+ """
+        Iterate over the dependency tree to detect whether there exist
+        circular dependencies or other kinds of bad dependency
+ """
+ deps_visit_cnt = {}
+ ordered_systemability_names = ordered_sa_names
+ systemability_deps_dict = sa_deps_dict
+
+ def init_dependencies_visit_counter():
+ for name in ordered_systemability_names:
+ deps_visit_cnt[name] = 0
+
+ def do_check(systemability, dependency):
+ """
+ Check other kind dependency problem
+ """
+ for checker in dependency_checkers:
+ checker(systemability, dependency)
+
+ init_dependencies_visit_counter()
+ for systemability in ordered_systemability_names:
+ depend_path = []
+ depend_path.append(systemability)
+ while len(depend_path) != 0:
+ cur_systemability = depend_path[-1]
+                # cur_systemability may live in a different process, so its
+                # dependency info may not be found here
+ dependencies = systemability_deps_dict.get(cur_systemability)
+ if dependencies is None:
+ dependencies = []
+ deps_count = len(dependencies)
+ if deps_count == 0:
+ depend_path.pop()
+ else:
+ if deps_visit_cnt[cur_systemability] < deps_count:
+ index = deps_visit_cnt[cur_systemability]
+ cur_dependency = dependencies[index]
+ # execute other kind dependency checkers right here
+ do_check(cur_systemability, cur_dependency)
+ try:
+ depend_path.index(cur_dependency)
+ depend_path.append(cur_dependency)
+ FORMAT = "A circular dependency found: {}"
+ route = "->".join(map(str, depend_path))
+ raise CircularDependencyError(FORMAT.format(route))
+ except ValueError:
+ depend_path.append(cur_dependency)
+ deps_visit_cnt[cur_systemability] += 1
+ else:
+                    # pop the systemability being processed once all of
+                    # its dependencies have been visited
+ depend_path.pop()
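+
+    # e.g. deps {"1001": ["1002"], "1002": ["1001"]} (hypothetical ids) grow
+    # depend_path to ["1001", "1002"]; revisiting "1001" then raises
+    # CircularDependencyError with route "1001->1002->1001"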
+
+ def __extract_info_from_systemability_nodes(self):
+ """
+ Extract info like dependencies and bootphase from a systemability node
+ """
+ def validate_bootphase(bootphase, nodename):
+ FORMAT = ("In systemability: {}, The bootphase '{}' is not supported " +
+ "please check yourself")
+ if self.policy.bootphase_categories.get(bootphase) is None:
+ raise NotSupportedBootphaseError(FORMAT.format(nodename, bootphase))
+
+ def validate_creation(creation, nodename):
+ FORMAT = ("In tag <{}> only a boolean value is expected, " +
+ "but actually is '{}'")
+ if creation not in ["true", "false"]:
+ raise BadFormatXMLError(FORMAT.format("run-on-create", creation),
+ self.file_in_process)
+
+ def validate_systemability_name(nodename):
+ if not nodename.isdigit() or nodename.startswith("0"):
+ FORMAT = ("'s value should be non-zeros leading " +
+ "digits, but actually is {}")
+ raise BadFormatXMLError(FORMAT.format(nodename),
+ self.file_in_process)
+
+ def check_nodes_constraints(systemability_node, tag, ranges):
+ """
+ The number of a given node should be in a valid range
+ """
+ FORMAT = "The tag <{}> should be in range {}, but actually {} is found"
+ tags_nodes = systemability_node.findall(tag)
+ node_cnt = len(tags_nodes)
+ if node_cnt < ranges[0] or node_cnt > ranges[1]:
+ raise BadFormatXMLError(FORMAT.format(tag, ranges, node_cnt),
+ self.file_in_process)
+ return tags_nodes
+
+ def strip_node_value(tag, name):
+ """
+ Check empty or None tag value
+ """
+ FORMAT = "The tag <{}>'s value cannot be empty, but actually is {}"
+ if tag.text is None or tag.text.strip() == '':
+ raise BadFormatXMLError(FORMAT.format(name, tag.text),
+ self.file_in_process)
+ return tag.text.strip()
+
+ default_bootphase = RearrangementPolicy.DEFAULT_START_PHASE
+ for systemability_node in self.systemability_nodes:
+            # Required: one and only one <name> tag is expected
+ name_node = check_nodes_constraints(systemability_node,
+ "name", (1, 1))[0]
+ nodename = strip_node_value(name_node, "name")
+ validate_systemability_name(nodename)
+
+ try:
+ self.ordered_systemability_names.index(nodename)
+ raise SystemAbilityNameConflictError(nodename)
+ except ValueError:
+ self.ordered_systemability_names.append(nodename)
+ self.name_node_dict[nodename] = copy.deepcopy(systemability_node)
+ self.systemability_deps_dict[nodename] = []
+ self.bootphase_dict[nodename] = default_bootphase
+
+            # Optional: zero or one <bootphase> tag is accepted
+ bootphase_nodes = check_nodes_constraints(systemability_node,
+ "bootphase", (0, 1))
+ if len(bootphase_nodes) == 1:
+ bootphase_value = strip_node_value(bootphase_nodes[0],
+ "bootphase")
+ validate_bootphase(bootphase_value, nodename)
+ self.bootphase_dict[nodename] = bootphase_value
+
+            # Required: one and only one <run-on-create> tag is expected
+ runoncreate_node = check_nodes_constraints(systemability_node,
+ "run-on-create", (1, 1))[0]
+ runoncreate_value = strip_node_value(runoncreate_node,
+ "run-on-create")
+ validate_creation(runoncreate_value, nodename)
+ self.creation_dict[nodename] = runoncreate_value
+
+            # Optional: any number of <depend> tags is accepted
+ depend_nodes = check_nodes_constraints(systemability_node,
+ "depend", (0, sys.maxsize))
+ for depend_node in depend_nodes:
+ depend_value = strip_node_value(depend_node, "depend")
+ deps = self.systemability_deps_dict[nodename]
+ deps.append(depend_value)
+
+ def sort(self, source_file, dest_file):
+ self.file_in_process = source_file
+ dependency_checkers = []
+ dependency_checkers.append(self.__detect_invert_dependency)
+ dependency_checkers.append(self.__detect_creation_dependency)
+
+ self.__parse_xml_file(source_file)
+ self.__extract_info_from_systemability_nodes()
+ self.__read_xml_file_into_lines(source_file)
+ self.__detect_invalid_dependency(dependency_checkers,
+ self.ordered_systemability_names,
+ self.systemability_deps_dict)
+ self.__sort_systemability_by_bootphase_priority()
+ self.__rearrange_systemability_node_strict(dest_file)
+
+ @classmethod
+ def detect_invalid_dependency_globally(cls,
+ global_ordered_systemability_names,
+ global_systemability_deps_dict):
+ dependency_checkers = []
+ cls.__detect_invalid_dependency(dependency_checkers,
+ global_ordered_systemability_names,
+ global_systemability_deps_dict)
+
+ def get_deps_info(self):
+ """
+        Return systemabilities and their dependencies for later detection
+        of possible global circular-dependency problems
+ """
+ return [self.ordered_systemability_names, self.systemability_deps_dict]
diff --git a/dsoftbus/build/ohos/sa_profile/sa_profile.gni b/dsoftbus/build/ohos/sa_profile/sa_profile.gni
new file mode 100755
index 0000000000000000000000000000000000000000..11f21e4ba30a17edcb0eb2e20d80748dcc906c80
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_profile.gni
@@ -0,0 +1,226 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("//build/ohos/build_var.gni")
+import("//build/ohos_var.gni")
+
+# sa info config file template
+# support for configuring multiple files.
+template("ohos_sa_profile") {
+ assert(defined(invoker.sources))
+ assert(defined(invoker.subsystem_name) || defined(invoker.part_name))
+
+ if (defined(invoker.part_name)) {
+ part_name = invoker.part_name
+ } else {
+ part_name = invoker.subsystem_name
+ }
+
+ inputs_output_dir = "${root_out_dir}/sa_profile/inputs"
+
+ copy("${target_name}_copy") {
+ deps = []
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ sources = invoker.sources
+ outputs = [ "${target_out_dir}/profiles/{{source_file_part}}" ]
+ }
+
+ action_with_pydeps(target_name) {
+ deps = [ ":${target_name}_copy" ]
+ _output_dir = "${inputs_output_dir}/${part_name}"
+ _output_info_file = "${_output_dir}/${target_name}_info.json"
+ sources = get_target_outputs(":${target_name}_copy")
+ outputs = [ _output_info_file ]
+ script = "//build/ohos/sa_profile/sa_profile.py"
+ args = [ "--sa-input-files" ]
+ args += rebase_path(sources, root_build_dir)
+ args += [
+ "--target-label",
+ get_label_info(":${target_name}", "label_with_toolchain"),
+ "--output-module-info-file",
+ rebase_path(_output_info_file, root_build_dir),
+ "--part-name",
+ part_name,
+ ]
+ sa_install_info = {
+ label = get_label_info(":$target_name", "label_with_toolchain")
+ install_info_file = rebase_path(_output_info_file, root_build_dir)
+ part_name = part_name
+ toolchain = current_toolchain
+ type = "sa"
+ }
+ metadata = {
+ sa_install_info = [ sa_install_info ]
+ }
+ }
+}
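+
+# Typical usage (illustrative target, source and part names):
+#   ohos_sa_profile("foo_sa_profile") {
+#     sources = [ "1234.xml" ]
+#     part_name = "foo_part"
+#   }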
+
+# merge sa profile install files and generate sa profile install info
+template("ohos_sa_install_info") {
+ assert(defined(invoker.system_install_info_file))
+ assert(defined(invoker.sa_install_info_file))
+ assert(defined(invoker.current_platform))
+ assert(defined(invoker.current_platform_dir))
+
+ forward_variables_from(invoker,
+ [
+ "current_platform",
+ "current_platform_dir",
+ "system_install_info_file",
+ "sa_install_info_file",
+ "merged_sa_profile_dir",
+ "merged_sa_profile_zipfile",
+ ])
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps += invoker.deps
+ }
+ _sa_info_out_dir = "${current_platform_dir}/sa_profile"
+ binary_output_dir = "${_sa_info_out_dir}/binaries"
+
+ sa_profile_install_dir = "profile"
+
+ archive_info_file_name = "sa_modules_info.json"
+ sa_profile_archive_dir = "//${dist_dir_name}/sa_profiles"
+ sa_profile_archive_info_file =
+ "${sa_profile_archive_dir}/${archive_info_file_name}"
+
+ src_sa_install_info_file = "${_sa_info_out_dir}/src_sa_install_info.json"
+
+ action_with_pydeps("sa_profile_src_${current_platform}") {
+ deps = [ "//build/ohos/sa_profile:src_sa_infos_process" ]
+ deps += _deps
+ script = "//build/ohos/sa_profile/sa_profile_source.py"
+ src_sa_infos_file = "${product_output_dir}/src_sa_infos.json"
+ sources = [
+ src_sa_infos_file,
+ system_install_info_file,
+ ]
+ outputs = [ src_sa_install_info_file ]
+ args = [
+ "--src-sa-info-file",
+ rebase_path(src_sa_infos_file, root_build_dir),
+ "--system-install-info-file",
+ rebase_path(system_install_info_file, root_build_dir),
+ "--src-sa-install-info-file",
+ rebase_path(src_sa_install_info_file, root_build_dir),
+ ]
+ }
+
+ _sa_profile_binary_target = "sa_profile_binary_${current_platform}"
+ _binary_sa_output = "${_sa_info_out_dir}/${_sa_profile_binary_target}.zip"
+ action_with_pydeps(_sa_profile_binary_target) {
+ deps = _deps
+ deps += [ ":sa_profile_src_${current_platform}" ]
+ script = "//build/ohos/sa_profile/sa_profile_binary.py"
+ inputs = [ system_install_info_file ]
+ outputs = [ _binary_sa_output ]
+ depfile = "$target_gen_dir/$target_name.d"
+ args = [
+ "--system-install-info-file",
+ rebase_path(system_install_info_file, root_build_dir),
+ "--sa-output-dir",
+ rebase_path(binary_output_dir, root_build_dir),
+ "--sa-output-zipfile",
+ rebase_path(_binary_sa_output, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+
+ # Check if sa archive info file exists
+ _file_exists_script = "//build/ohos/file_exists.py"
+ _process_args = [
+ "--filename",
+ rebase_path(sa_profile_archive_info_file, root_build_dir),
+ ]
+ _result = exec_script(_file_exists_script, _process_args, "string")
+ if (_result == "True") {
+ inputs += [ sa_profile_archive_info_file ]
+ args += [
+ "--sa-profile-archive-info-file",
+ rebase_path(sa_profile_archive_info_file, root_build_dir),
+ ]
+ }
+ }
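+
+  # Note: exec_script() runs at gn-gen time, so the sa profile archive info
+  # file is only picked up when it already exists before generation.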
+
+ action_with_pydeps(target_name) {
+ deps = [
+ ":sa_profile_binary_${current_platform}",
+ ":sa_profile_src_${current_platform}",
+ ]
+ script = "//build/ohos/sa_profile/sa_profile_merge.py"
+ sources = [
+ _binary_sa_output,
+ src_sa_install_info_file,
+ ]
+ outputs = [
+ sa_install_info_file,
+ merged_sa_profile_zipfile,
+ ]
+ depfile = "$target_gen_dir/$target_name.d"
+ args = [
+ "--src-sa-install-info-file",
+ rebase_path(src_sa_install_info_file, root_build_dir),
+ "--no-src-sa-install-info-file",
+ rebase_path(_binary_sa_output, root_build_dir),
+ "--sa-output-dir",
+ rebase_path(merged_sa_profile_dir, root_build_dir),
+ "--merged-sa-profile",
+ rebase_path(merged_sa_profile_zipfile, root_build_dir),
+ "--sa-install-info-file",
+ rebase_path(sa_install_info_file, root_build_dir),
+ "--sa-info-install-dest-dir",
+ "${system_base_dir}/${sa_profile_install_dir}",
+ "--target-cpu",
+ target_cpu,
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+ }
+}
+
+template("ohos_sa_info_archive") {
+ archive_info_file_name = "sa_modules_info.json"
+ _deps = [ "//build/ohos/sa_profile:src_sa_infos_process" ]
+ sa_profile_src_infos_file = "${product_output_dir}/src_sa_infos.json"
+
+ action_with_pydeps(target_name) {
+ deps = _deps
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ inputs = [ sa_profile_src_infos_file ]
+ depfile = "$target_gen_dir/$target_name.d"
+ sa_archive_output_dir = "${dist_build_out_dir}/sa_profiles"
+ sa_archive_info_file = "${sa_archive_output_dir}/${archive_info_file_name}"
+ outputs = [
+ sa_archive_info_file,
+ sa_archive_output_dir,
+ ]
+ script = "//build/ohos/sa_profile/sa_profile_archive.py"
+ args = [
+ "--src-sa-install-info-file",
+ rebase_path(sa_profile_src_infos_file, root_build_dir),
+ "--sa-archive-output-dir",
+ rebase_path(sa_archive_output_dir, root_build_dir),
+ "--sa-archive-info-file",
+ rebase_path(sa_archive_info_file, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+ }
+}
diff --git a/dsoftbus/build/ohos/sa_profile/sa_profile.py b/dsoftbus/build/ohos/sa_profile/sa_profile.py
new file mode 100755
index 0000000000000000000000000000000000000000..fb72d3b5eb6538001cf7b57acdcd5ae48776132e
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_profile.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import write_json_file # noqa: E402
+
+
+def _generate_info_file(part_name, target_label, sa_info_files,
+ out_info_file):
+ info = {
+ 'part_name': part_name,
+ 'label': target_label,
+ 'sa_info_files': sa_info_files
+ }
+ write_json_file(out_info_file, info)
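+    # resulting json, with illustrative values:
+    #   {"part_name": "foo", "label": "//foo/bar:foo_sa_profile",
+    #    "sa_info_files": ["../../foo/bar/1234.xml"]}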
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--sa-input-files', nargs='+', required=True)
+ parser.add_argument('--target-label', required=True)
+ parser.add_argument('--output-module-info-file', required=True)
+ parser.add_argument('--part-name', required=True)
+ args = parser.parse_args()
+ _generate_info_file(args.part_name, args.target_label,
+ args.sa_input_files, args.output_module_info_file)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/sa_profile/sa_profile.pydeps b/dsoftbus/build/ohos/sa_profile/sa_profile.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..1f8bb2f0780940da5f71b60363af28682103f3dc
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_profile.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/sa_profile --output build/ohos/sa_profile/sa_profile.pydeps build/ohos/sa_profile/sa_profile.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/file_utils.py
+sa_profile.py
diff --git a/dsoftbus/build/ohos/sa_profile/sa_profile_archive.py b/dsoftbus/build/ohos/sa_profile/sa_profile_archive.py
new file mode 100755
index 0000000000000000000000000000000000000000..d95a3c823b952a0013ac6742c9363b7f2c93e94c
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_profile_archive.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import shutil
+import argparse
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import write_json_file, read_json_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+
+def _get_src_sa_info(src_sa_install_info_file, depfiles):
+ src_sa_install_info = read_json_file(src_sa_install_info_file)
+ if src_sa_install_info is None:
+ raise Exception("read src_sa_install_info_file failed.")
+ install_info_file_dict = {}
+ for _install_info in src_sa_install_info:
+ if _install_info.get('type') == 'sa':
+ part_name = _install_info.get('part_name')
+ _install_infos = install_info_file_dict.get(part_name)
+ if _install_infos is None:
+ _install_infos = []
+ _install_infos.append(_install_info.get('install_info_file'))
+ install_info_file_dict[part_name] = _install_infos
+
+ all_sa_info_files_dict = {}
+ for part_name, _install_info_files in install_info_file_dict.items():
+ for _install_info_file in _install_info_files:
+ _install_info = read_json_file(_install_info_file)
+ if _install_info is None:
+ raise Exception("read install_info_file '{}' failed.".format(
+ _install_info_file))
+ depfiles.append(_install_info_file)
+ sa_info_files = _install_info.get('sa_info_files')
+ _file_list = all_sa_info_files_dict.get(part_name)
+ if _file_list is None:
+ all_sa_info_files_dict[part_name] = sa_info_files
+ else:
+ _file_list.extend(sa_info_files)
+ all_sa_info_files_dict[part_name] = _file_list
+ return all_sa_info_files_dict
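+    # the returned mapping is part_name -> list of sa profile xml paths,
+    # e.g. {"foo_part": ["gen/foo/1234.xml"]} (illustrative names)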
+
+
+def _file_archive(all_sa_info_files_dict, archive_output_dir,
+ archive_info_file, depfiles):
+ info_dict = {}
+ _info_file_dir = os.path.dirname(archive_info_file)
+ _relative_path = os.path.relpath(_info_file_dir, archive_output_dir)
+ for key, value in all_sa_info_files_dict.items():
+ subsystem_out_dir = os.path.join(archive_output_dir, key)
+ if not os.path.exists(subsystem_out_dir):
+ os.makedirs(subsystem_out_dir, exist_ok=True)
+ _file_list = []
+ for _file in value:
+ depfiles.append(_file)
+ shutil.copy2(_file, subsystem_out_dir)
+ _file_list.append(
+ os.path.join(_relative_path, key, os.path.basename(_file)))
+ info_dict[key] = _file_list
+ write_json_file(archive_info_file, info_dict)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--src-sa-install-info-file', required=True)
+ parser.add_argument('--sa-archive-output-dir', required=True)
+ parser.add_argument('--sa-archive-info-file', required=True)
+ parser.add_argument('--depfile', required=True)
+ args = parser.parse_args()
+
+ depfiles = []
+ all_sa_info_files_dict = _get_src_sa_info(args.src_sa_install_info_file,
+ depfiles)
+
+ _file_archive(all_sa_info_files_dict, args.sa_archive_output_dir,
+ args.sa_archive_info_file, depfiles)
+ build_utils.write_depfile(args.depfile, args.sa_archive_info_file, depfiles)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/sa_profile/sa_profile_archive.pydeps b/dsoftbus/build/ohos/sa_profile/sa_profile_archive.pydeps
new file mode 100755
index 0000000000000000000000000000000000000000..3887e17aa7e7f8134cc67bfb3f54245bf80cfc7c
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_profile_archive.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/sa_profile --output build/ohos/sa_profile/sa_profile_archive.pydeps build/ohos/sa_profile/sa_profile_archive.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+sa_profile_archive.py
diff --git a/dsoftbus/build/ohos/sa_profile/sa_profile_binary.py b/dsoftbus/build/ohos/sa_profile/sa_profile_binary.py
new file mode 100755
index 0000000000000000000000000000000000000000..7e9b251535190a68ca1c915e3b0dd53e7f12b592
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_profile_binary.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+import zipfile
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file  # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+
+def get_no_src_parts(system_install_info_file):
+ install_parts_info = read_json_file(system_install_info_file)
+ if install_parts_info is None:
+ raise Exception("read no src subsystem info file failed.")
+ parts_name_list = []
+ for info in install_parts_info:
+ if 'is_source' not in info:
+ raise Exception(
+ "read subsystem info error, missing key is_source.")
+ if info.get('is_source') is True:
+ continue
+ if 'part_name' not in info:
+ raise Exception("read part info error, missing key part_name.")
+
+ part_name = info.get('part_name')
+ parts_name_list.append(part_name)
+ return parts_name_list
+
+
+def generate_binary_sa_archive(parts_list, sa_profile_archive_info_file,
+ sa_output_dir, depfiles):
+ if not os.path.exists(sa_output_dir):
+ os.makedirs(sa_output_dir, exist_ok=True)
+ sa_tuples = []
+ subsystem_sa_archive_dir = os.path.dirname(sa_profile_archive_info_file)
+ if not os.path.exists(subsystem_sa_archive_dir):
+ return sa_tuples
+ if not os.path.exists(sa_profile_archive_info_file):
+ print("warning: sa profile archive info file does not exist.")
+ return sa_tuples
+
+ sa_profile_archive_info = read_json_file(sa_profile_archive_info_file)
+ if sa_profile_archive_info is None:
+ raise Exception("read sa profile archive info file failed.")
+
+ for _part_name in parts_list:
+ _sa_file_list = sa_profile_archive_info.get(_part_name)
+ if _sa_file_list is None:
+ continue
+ for _sa_file in _sa_file_list:
+ _sa_file_path = os.path.join(subsystem_sa_archive_dir, _sa_file)
+ sa_tuples.append((_sa_file_path,
+ os.path.relpath(_sa_file_path,
+ subsystem_sa_archive_dir)))
+ depfiles.append(_sa_file_path)
+ return sa_tuples
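+    # each tuple is (source path, archive-relative name), matching the
+    # (src_path, zip path) arguments passed to add_to_zip_hermetic() in main()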
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--system-install-info-file', required=True)
+ parser.add_argument('--sa-profile-archive-info-file', required=False)
+ parser.add_argument('--sa-output-dir', required=True)
+ parser.add_argument('--sa-output-zipfile', required=True)
+ parser.add_argument('--depfile', required=True)
+ args = parser.parse_args()
+
+ sa_files_list = []
+ depfiles = []
+ sa_files_tuples = []
+ if args.sa_profile_archive_info_file:
+ depfiles.append(args.sa_profile_archive_info_file)
+ parts_list = get_no_src_parts(args.system_install_info_file)
+ sa_files_tuples.extend(
+ generate_binary_sa_archive(parts_list,
+ args.sa_profile_archive_info_file,
+ args.sa_output_dir, depfiles))
+ with zipfile.ZipFile(args.sa_output_zipfile, 'w') as outfile:
+ for sa_file_path, sa_file in sa_files_tuples:
+ build_utils.add_to_zip_hermetic(outfile,
+ sa_file,
+ src_path=sa_file_path)
+ build_utils.write_depfile(args.depfile, args.sa_output_zipfile, depfiles)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/sa_profile/sa_profile_binary.pydeps b/dsoftbus/build/ohos/sa_profile/sa_profile_binary.pydeps
new file mode 100755
index 0000000000000000000000000000000000000000..5ab6f756f93e490e0fecb79ff3fa0994d825239a
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_profile_binary.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/sa_profile --output build/ohos/sa_profile/sa_profile_binary.pydeps build/ohos/sa_profile/sa_profile_binary.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+sa_profile_binary.py
diff --git a/dsoftbus/build/ohos/sa_profile/sa_profile_merge.py b/dsoftbus/build/ohos/sa_profile/sa_profile_merge.py
new file mode 100755
index 0000000000000000000000000000000000000000..c1f5430d39bf1d9a2a8a946448232f51cee97860
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_profile_merge.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import os
+import sys
+import shutil
+
+from sa_info_process.merge_sa_info import SAInfoMerger
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import write_json_file, \
+ read_json_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+
+def _get_src_sa_info(src_sa_install_info_file, depfiles):
+ src_sa_install_info = read_json_file(src_sa_install_info_file)
+ if src_sa_install_info is None:
+ raise Exception("read src_sa_install_info_file failed.")
+ install_info_file_list = []
+ for _install_info in src_sa_install_info:
+ if _install_info.get('type') == 'sa':
+ install_info_file_list.append(
+ _install_info.get('install_info_file'))
+
+ depfiles.extend(install_info_file_list)
+ all_sa_input_files = []
+ for _install_info_file in install_info_file_list:
+ _install_info = read_json_file(_install_info_file)
+ if _install_info is None:
+ raise Exception("read install_info_file '{}' failed.".format(
+ _install_info_file))
+ sa_info_files = _install_info.get('sa_info_files')
+ all_sa_input_files.extend(sa_info_files)
+ depfiles.extend(all_sa_input_files)
+ return all_sa_input_files
+
+
+def _sa_profile_merge(sa_input_files, no_src_subsystem_sa_zipfile,
+ merge_out_dir, merged_zipfile, target_cpu):
+ with build_utils.temp_dir() as tmp:
+ build_utils.extract_all(no_src_subsystem_sa_zipfile, tmp)
+ for root, _, files in os.walk(tmp):
+ for sa_file in files:
+ sa_input_files.append(os.path.join(root, sa_file))
+
+ if not os.path.exists(merge_out_dir):
+ os.makedirs(merge_out_dir, exist_ok=True)
+
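+    # merge_sa_info rewrites <libpath> to /system/lib/<libname> for 32-bit
+    # targets, so everything except arm and x86 is treated as 64-bit here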
+ is_64bit_arch = target_cpu not in ["arm", "x86"]
+ # call merge tool
+ merge_tool = SAInfoMerger(is_64bit_arch)
+ result_file_list = merge_tool.merge(sorted(sa_input_files),
+ merge_out_dir)
+ build_utils.zip_dir(merged_zipfile, merge_out_dir)
+ shutil.rmtree(merge_out_dir)
+ return result_file_list
+
+
+def _generate_install_info(sa_result_file_list, sa_info_install_dest_dir,
+ sa_install_info_file):
+ module_install_info_list = []
+ for _sa_file in sa_result_file_list:
+ _install_dest = os.path.join(sa_info_install_dest_dir,
+ os.path.basename(_sa_file))
+ module_install_info = {
+ 'type': 'sa_info',
+ 'source': _sa_file,
+ 'install_enable': True,
+ 'dest': [ _install_dest ]
+ }
+ module_install_info_list.append(module_install_info)
+ write_json_file(sa_install_info_file, module_install_info_list)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--src-sa-install-info-file', required=True)
+ parser.add_argument('--no-src-sa-install-info-file', required=True)
+ parser.add_argument('--sa-output-dir', required=True)
+ parser.add_argument('--merged-sa-profile', required=True)
+ parser.add_argument('--sa-install-info-file', required=True)
+ parser.add_argument('--sa-info-install-dest-dir', required=True)
+ parser.add_argument('--target-cpu', default='arm64')
+ parser.add_argument('--depfile', required=True)
+ args = parser.parse_args()
+
+ depfiles = []
+ src_subsystem_file_list = _get_src_sa_info(args.src_sa_install_info_file,
+ depfiles)
+ no_src_sa_profile_zip = args.no_src_sa_install_info_file
+
+ result_file_list = _sa_profile_merge(src_subsystem_file_list,
+ no_src_sa_profile_zip,
+ args.sa_output_dir,
+ args.merged_sa_profile,
+ args.target_cpu)
+ _generate_install_info(result_file_list, args.sa_info_install_dest_dir,
+ args.sa_install_info_file)
+ build_utils.write_depfile(
+ args.depfile, args.sa_install_info_file, depfiles)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/sa_profile/sa_profile_merge.pydeps b/dsoftbus/build/ohos/sa_profile/sa_profile_merge.pydeps
new file mode 100755
index 0000000000000000000000000000000000000000..c7e484be1d106685a27d3a02be5cc83ea26df44f
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_profile_merge.pydeps
@@ -0,0 +1,14 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/sa_profile --output build/ohos/sa_profile/sa_profile_merge.pydeps build/ohos/sa_profile/sa_profile_merge.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+sa_info_process/__init__.py
+sa_info_process/merge_sa_info.py
+sa_info_process/sa_info_config_errors.py
+sa_info_process/sort_sa_by_bootphase.py
+sa_profile_merge.py
diff --git a/dsoftbus/build/ohos/sa_profile/sa_profile_source.py b/dsoftbus/build/ohos/sa_profile/sa_profile_source.py
new file mode 100755
index 0000000000000000000000000000000000000000..386da629ffc549f1694828f4554554d79be8b0a2
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_profile_source.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+
+
+def get_src_parts(system_install_info_file):
+ parts_list = read_json_file(system_install_info_file)
+ if parts_list is None:
+ raise Exception("read file '{}' failed.".format(system_install_info_file))
+ src_parts_info = {}
+ for _part_info in parts_list:
+ if 'is_source' not in _part_info:
+ raise Exception(
+ "read subsystem info error, missing key is_source.")
+ if _part_info.get('is_source') is True:
+ part_name = _part_info.get('part_name')
+ origin_part_name = _part_info.get('origin_part_name')
+ src_parts_info[part_name] = origin_part_name
+ return src_parts_info
+
+
+def get_sa_install_info(sa_info_file, src_parts_info):
+ sa_infos = read_json_file(sa_info_file)
+ if sa_infos is None:
+ raise Exception("read file '{}' failed.".format(sa_info_file))
+ sa_install_infos = []
+ for _sa_info in sa_infos:
+ _sa_info_part = _sa_info.get('part_name')
+ if _sa_info_part in src_parts_info:
+ sa_install_infos.append(_sa_info)
+ return sa_install_infos
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--system-install-info-file', required=True)
+ parser.add_argument('--src-sa-info-file', required=False)
+ parser.add_argument('--src-sa-install-info-file', required=True)
+ args = parser.parse_args()
+
+ src_parts_info = get_src_parts(args.system_install_info_file)
+ sa_install_infos = get_sa_install_info(args.src_sa_info_file,
+ src_parts_info)
+ write_json_file(args.src_sa_install_info_file, sa_install_infos)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/sa_profile/sa_profile_source.pydeps b/dsoftbus/build/ohos/sa_profile/sa_profile_source.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..7b0087bbbbd8545fa6a78d4298ada0ab3a3c7458
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/sa_profile_source.pydeps
@@ -0,0 +1,6 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/sa_profile --output build/ohos/sa_profile/sa_profile_source.pydeps build/ohos/sa_profile/sa_profile_source.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/file_utils.py
+sa_profile_source.py
diff --git a/dsoftbus/build/ohos/sa_profile/src_sa_profile_process.py b/dsoftbus/build/ohos/sa_profile/src_sa_profile_process.py
new file mode 100755
index 0000000000000000000000000000000000000000..d8568d63307b00d3f14a90ef65c077162ae2883b
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/src_sa_profile_process.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+
+def sa_info_process(sa_profile_infos_file, toolchain_platform):
+ sa_profile_infos = read_json_file(sa_profile_infos_file)
+ if sa_profile_infos is None:
+ raise Exception("read file '{}' failed.".format(sa_profile_infos))
+ processed_sa_infos = []
+ for _sa_info in sa_profile_infos:
+ origin_part_name = _sa_info.get('part_name')
+ if origin_part_name is None or origin_part_name == '':
+ raise Exception(
+ "get part name failed in sa info, part_name='{}'".format(
+ origin_part_name))
+ _toolchain = _sa_info.get('toolchain')
+ platform_name = toolchain_platform.get(_toolchain)
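+ # Non-phone variants get a '<part>_<platform>' name so they don't collide with the default (phone) part.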
+ if platform_name != 'phone':
+ part_name = '{}_{}'.format(origin_part_name, platform_name)
+ else:
+ part_name = origin_part_name
+ _sa_info['part_name'] = part_name
+ _sa_info['origin_part_name'] = origin_part_name
+ processed_sa_infos.append(_sa_info)
+ return processed_sa_infos
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--variants-toolchain-file', required=True)
+ parser.add_argument('--sa-profile-infos-file', required=False)
+ parser.add_argument('--output-file', required=True)
+ parser.add_argument('--depfile', required=True)
+ args = parser.parse_args()
+
+ depfiles = [args.variants_toolchain_file]
+ variants_toolchain_info = read_json_file(args.variants_toolchain_file)
+ if variants_toolchain_info is None:
+ raise Exception(
+ "read file '{}' failed.".format(args.variants_toolchain_file))
+ toolchain_platform = variants_toolchain_info.get('toolchain_platform')
+ processed_sa_infos = sa_info_process(args.sa_profile_infos_file,
+ toolchain_platform)
+ write_json_file(args.output_file, processed_sa_infos)
+ build_utils.write_depfile(args.depfile, args.output_file, depfiles)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/sa_profile/src_sa_profile_process.pydeps b/dsoftbus/build/ohos/sa_profile/src_sa_profile_process.pydeps
new file mode 100755
index 0000000000000000000000000000000000000000..0da37f08895932a58cbcb51d1e104236bd04490e
--- /dev/null
+++ b/dsoftbus/build/ohos/sa_profile/src_sa_profile_process.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/sa_profile --output build/ohos/sa_profile/src_sa_profile_process.pydeps build/ohos/sa_profile/src_sa_profile_process.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+src_sa_profile_process.py
diff --git a/dsoftbus/build/ohos/sdk/BUILD.gn b/dsoftbus/build/ohos/sdk/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..d447bde099e6d10fb14a74118419ddd561fd90f6
--- /dev/null
+++ b/dsoftbus/build/ohos/sdk/BUILD.gn
@@ -0,0 +1,186 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/ohos/build_var.gni")
+import("//build/ohos/sdk/sdk.gni")
+import("${build_configs_path}/platforms_list.gni")
+
+parse_script = "//build/ohos/sdk/parse_sdk_description.py"
+ohos_sdk_description_file = "//build/ohos/sdk/ohos_sdk_description_std.json"
+
+generated_sdk_modules_gni = "${generated_files_dir}/ohos_sdk_modules.gni"
+generated_sdk_types_file = "${generated_files_dir}/generated_sdk_types.txt"
+
+parse_args = [
+ "--sdk-description-file",
+ rebase_path(ohos_sdk_description_file, root_build_dir),
+ "--sdk-modules-gni",
+ rebase_path(generated_sdk_modules_gni, root_build_dir),
+ "--sdk-install-info-file",
+ rebase_path(generated_sdk_module_install_paths, root_build_dir),
+ "--sdk-types-file",
+ rebase_path(generated_sdk_types_file, root_build_dir),
+ "--base-platform",
+ "phone",
+ "--variant-to-product",
+ rebase_path("./variant_to_product.json", root_build_dir),
+ "--source-root-dir",
+ rebase_path("//", root_build_dir),
+]
+foreach(p, target_platform_list) {
+ parse_args += [
+ "--platforms",
+ p,
+ ]
+}
+
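+# exec_script runs at gn-gen time, so the .gni file emitted by the parse script can be imported right below.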
+exec_script(parse_script, parse_args)
+
+sdk_types = read_file(generated_sdk_types_file, "list lines")
+
+import("${generated_sdk_modules_gni}")
+
+generate_all_types_sdk_script = "//build/ohos/sdk/generate_all_types_sdk.py"
+generated_build_gn = "$generated_files_dir/BUILD.gn"
+generate_args = [
+ "--sdk-modules",
+ "ohos_sdk_modules",
+ "--output",
+ rebase_path(generated_build_gn, root_build_dir),
+ "--current-dir",
+ get_path_info(sdk_base_build_gn, "dir"),
+ "--generated-sdk-modules-gni",
+ generated_sdk_modules_gni,
+ "--type2displayname",
+ rebase_path("./type_to_display_name.json", root_build_dir),
+ "--api-version",
+ api_version,
+ "--release-type",
+ release_type,
+ "--meta-version",
+ meta_version,
+]
+
+foreach(os, sdk_systems) {
+ generate_args += [
+ "--sdk-systems",
+ os,
+ ]
+}
+
+foreach(sdk_type, sdk_types) {
+ generate_args += [
+ "--sdk-types",
+ sdk_type,
+ ]
+}
+
+exec_script(generate_all_types_sdk_script, generate_args)
+
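+# Merge the per-OS/per-type notice archives collected below into a single NOTICE txt/gz pair for the whole SDK.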
+action_with_pydeps("generate_sdk_notice") {
+ deps = [ "${generated_files_dir}:generated_ohos_sdk" ]
+ inputs = []
+
+ script = "//build/ohos/notice/merge_notice_files.py"
+ depfile = "$target_gen_dir/$target_name.d"
+ _install_path = "${target_out_dir}/SDK_NOTICE_FILES/"
+
+ outputs = [
+ sdk_notice_txt,
+ sdk_notice_gz,
+ ]
+ args = [
+ "--image-name",
+ "sdk",
+ "--notice-root-dir",
+ rebase_path(sdk_notice_dir, root_build_dir),
+ "--output-notice-txt",
+ rebase_path(sdk_notice_txt, root_build_dir),
+ "--output-notice-gz",
+ rebase_path(sdk_notice_gz, root_build_dir),
+ "--notice-title",
+ "Notices for files contained in SDK in this directory:",
+ "--static-library-notice-dir",
+ rebase_path(static_libraries_notice_dir, root_build_dir),
+ "--target-cpu",
+ target_cpu,
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+
+ foreach(type, sdk_types) {
+ foreach(os, sdk_systems) {
+ args += [
+ "--collected-notice-zipfile",
+ rebase_path("${sdk_notice_archive_dir}/${os}-${type}.zip",
+ root_build_dir),
+ ]
+ inputs += [ "${sdk_notice_archive_dir}/${os}-${type}.zip" ]
+ }
+ }
+}
+
+action("verify_sdk_notice_file") {
+ deps = [ ":generate_sdk_notice" ]
+
+ script = "//build/core/build_scripts/verify_notice.sh"
+ _verify_result = "${target_out_dir}/sdk_notice_verify_result.out"
+
+ outputs = [ _verify_result ]
+
+ args = [
+ rebase_path(sdk_notice_txt, root_build_dir),
+ rebase_path(_verify_result, root_build_dir),
+ rebase_path("${product_output_dir}/ohos-sdk", root_build_dir),
+ ]
+}
+
+group("ohos_sdk_pre") {
+ deps = [
+ ":generate_sdk_notice",
+ ":verify_sdk_notice_file",
+ "${generated_files_dir}:generated_ohos_sdk",
+ ]
+}
+
+action_with_pydeps("add_notice_file") {
+ deps = [
+ ":generate_sdk_notice",
+ ":ohos_sdk_pre",
+ ]
+ script = "//build/ohos/sdk/add_notice_file.py"
+ _output = target_gen_dir + "/$target_name.stamp"
+ depfile = "$target_gen_dir/$target_name.d"
+ args = [
+ "--sdk-notice-file",
+ rebase_path(sdk_notice_txt, root_build_dir),
+ "--sdk-archive-dir",
+ rebase_path("${product_output_dir}/ohos-sdk", root_build_dir),
+ "--output",
+ rebase_path(_output, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--skip-pattern",
+ "native",
+ ]
+
+ inputs = [ sdk_notice_txt ]
+ outputs = [ _output ]
+}
+
+group("ohos_sdk") {
+ deps = [
+ ":add_notice_file",
+ ":ohos_sdk_pre",
+ ]
+}
diff --git a/dsoftbus/build/ohos/sdk/add_notice_file.py b/dsoftbus/build/ohos/sdk/add_notice_file.py
new file mode 100755
index 0000000000000000000000000000000000000000..6c6bc58741be2293210216ca17e9cce2507c6efa
--- /dev/null
+++ b/dsoftbus/build/ohos/sdk/add_notice_file.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+import os
+import zipfile
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util import build_utils # noqa: E402
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ build_utils.add_depfile_option(parser)
+
+ parser.add_argument('--sdk-notice-file', required=True)
+ parser.add_argument('--sdk-archive-dir', default=None)
+ parser.add_argument('--output', required=True)
+ parser.add_argument('--skip-pattern', required=True)
+
+ options = parser.parse_args()
+
+ sdk_archives = []
+ for root, _, files in os.walk(options.sdk_archive_dir):
+ sdk_archives.extend(
+ [os.path.join(root, f) for f in files if f.endswith('.zip')])
+ for archive in sdk_archives:
+ if options.skip_pattern in archive:
+ continue
+ with zipfile.ZipFile(archive, 'a') as zip_file, open(
+ options.sdk_notice_file) as notice:
+ if not zip_file.namelist():
+ continue
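+ # Drop the notice file next to the archive's single top-level directory.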
+ dirname = zip_file.namelist()[0].split('/')[0]
+ arcname = os.path.join(dirname,
+ os.path.basename(options.sdk_notice_file))
+ if arcname in zip_file.namelist():
+ print("Warning: {} is already in {}".format(arcname, archive))
+ continue
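+ # build_utils.HERMETIC_TIMESTAMP is a fixed timestamp, keeping rebuilt archives byte-identical.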
+ zip_info = zipfile.ZipInfo(filename=arcname,
+ date_time=build_utils.HERMETIC_TIMESTAMP)
+ zip_file.writestr(zip_info,
+ notice.read(),
+ compress_type=zipfile.ZIP_STORED)
+ build_utils.touch(options.output)
+
+ build_utils.write_depfile(options.depfile,
+ options.output,
+ sdk_archives,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/sdk/add_notice_file.pydeps b/dsoftbus/build/ohos/sdk/add_notice_file.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..83da0e4f6906575f299db617e73338361fa6dade
--- /dev/null
+++ b/dsoftbus/build/ohos/sdk/add_notice_file.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/sdk --output build/ohos/sdk/add_notice_file.pydeps build/ohos/sdk/add_notice_file.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+add_notice_file.py
diff --git a/dsoftbus/build/ohos/sdk/copy_sdk_modules.py b/dsoftbus/build/ohos/sdk/copy_sdk_modules.py
new file mode 100755
index 0000000000000000000000000000000000000000..d4c6b245eb101c12708ebad84fe13dd48127f740
--- /dev/null
+++ b/dsoftbus/build/ohos/sdk/copy_sdk_modules.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+import os
+import shutil
+import zipfile
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util import build_utils # noqa: E402
+from scripts.util.file_utils import read_json_file # noqa: E402
+
+
+def get_source_from_module_info_file(module_info_file):
+ data = read_json_file(module_info_file)
+ if data is None:
+ raise Exception("read file '{}' failed.".format(module_info_file))
+ source = data.get('source')
+ notice = data.get('notice')
+ return source, notice
+
+
+def do_copy_and_stamp(copy_infos, options, depfile_deps):
+ notice_tuples = []
+ for cp_info in copy_infos:
+ source = cp_info.get('source')
+ dest = cp_info.get('dest')
+ notice = cp_info.get('notice')
+ install_dir = cp_info.get('install_dir')
+ if os.path.isdir(source):
+ if os.listdir(source):
+ files = build_utils.get_all_files(source)
+ if files:
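+ # dirs_exist_ok requires Python 3.8 or newer.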
+ shutil.copytree(source, dest, dirs_exist_ok=True)
+ depfile_deps.update(build_utils.get_all_files(source))
+ else:
+ # Skip empty directories.
+ depfile_deps.add(source)
+ else:
+ dest_dir = os.path.dirname(dest)
+ os.makedirs(dest_dir, exist_ok=True)
+ shutil.copy2(source, dest)
+ depfile_deps.add(source)
+ if notice and os.path.exists(notice):
+ depfile_deps.add(notice)
+ if notice.endswith('.zip'):
+ suffix = ".zip"
+ else:
+ suffix = ".txt"
+ if os.path.isdir(source):
+ notice_dest = '{}{}'.format(install_dir, suffix)
+ else:
+ notice_dest = os.path.join(
+ install_dir, '{}{}'.format(os.path.basename(source),
+ suffix))
+ notice_tuples.append((notice_dest, notice))
+ build_utils.zip_dir(options.sdk_output_archive,
+ options.archive_dir,
+ zip_prefix_path=options.zip_prefix_path)
+ with zipfile.ZipFile(options.notice_output_archive, 'w') as outfile:
+ for zip_path, fs_path in notice_tuples:
+ build_utils.add_to_zip_hermetic(outfile,
+ zip_path,
+ src_path=fs_path)
+
+ build_utils.write_depfile(options.depfile,
+ options.sdk_output_archive,
+ depfile_deps,
+ add_pydeps=False)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ build_utils.add_depfile_option(parser)
+
+ parser.add_argument('--sdk-modules-desc-file', required=True)
+ parser.add_argument('--sdk-archive-paths-file', required=True)
+ parser.add_argument('--dest-dir', required=True)
+ parser.add_argument('--archive-dir', required=True)
+ parser.add_argument('--zip-prefix-path', default=None)
+ parser.add_argument('--notice-output-archive', required=True)
+ parser.add_argument('--sdk-output-archive', required=True)
+
+ options = parser.parse_args()
+
+ sdk_modules_desc_file = options.sdk_modules_desc_file
+ sdk_out_dir = options.dest_dir
+ sdk_archive_paths_file = options.sdk_archive_paths_file
+
+ sdk_modules = read_json_file(sdk_modules_desc_file)
+ if sdk_modules is None:
+ sdk_modules = []
+
+ archive_paths = read_json_file(sdk_archive_paths_file)
+ if archive_paths is None:
+ archive_paths = []
+
+ depfile_deps = set(
+ [options.sdk_modules_desc_file, options.sdk_archive_paths_file])
+ copy_infos = []
+ for module in sdk_modules:
+ cp_info = {}
+ sdk_label = module.get('label')
+ module_info_file = module.get('module_info_file')
+ source, notice = get_source_from_module_info_file(module_info_file)
+ cp_info['source'] = source
+ cp_info['notice'] = notice
+ depfile_deps.add(module_info_file)
+
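+ # Each module label is expected to have a matching archive-path entry; 'dest' and 'item' come from that match.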
+ for item in archive_paths:
+ if sdk_label == item.get('label'):
+ dest = os.path.join(sdk_out_dir, item.get('install_dir'),
+ os.path.basename(source))
+ break
+ cp_info['dest'] = dest
+ cp_info['install_dir'] = item.get('install_dir')
+ copy_infos.append(cp_info)
+
+ do_copy_and_stamp(copy_infos, options, depfile_deps)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/sdk/copy_sdk_modules.pydeps b/dsoftbus/build/ohos/sdk/copy_sdk_modules.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..e75fded634f92be13bfd44ccb784a30fadb1a1a5
--- /dev/null
+++ b/dsoftbus/build/ohos/sdk/copy_sdk_modules.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/sdk --output build/ohos/sdk/copy_sdk_modules.pydeps build/ohos/sdk/copy_sdk_modules.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+copy_sdk_modules.py
diff --git a/dsoftbus/build/ohos/sdk/generate_all_types_sdk.py b/dsoftbus/build/ohos/sdk/generate_all_types_sdk.py
new file mode 100755
index 0000000000000000000000000000000000000000..a917d7486a82024afbbe61cd9776c5fdccfdeea3
--- /dev/null
+++ b/dsoftbus/build/ohos/sdk/generate_all_types_sdk.py
@@ -0,0 +1,146 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+import os
+
+sys.path.append(
+ os.path.dirname(
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
+from scripts.util.file_utils import write_file, read_json_file # noqa: E402
+
+_SOURCE_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..', '..'))
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1, os.path.join(_SOURCE_ROOT, 'third_party'))
+from jinja2 import Template # pylint: disable=F0401
+
+
+def read_display_name(type2displayname):
+ return read_json_file(type2displayname)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument('--sdk-systems', action='append')
+ parser.add_argument('--sdk-modules')
+ parser.add_argument('--sdk-types', action='append')
+ parser.add_argument('--current-dir')
+ parser.add_argument('--generated-sdk-modules-gni')
+ parser.add_argument('--type2displayname')
+ parser.add_argument('--api-version')
+ parser.add_argument('--release-type')
+ parser.add_argument('--meta-version')
+
+ parser.add_argument('--output', required=True)
+
+ options = parser.parse_args()
+
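+ # Render the generated BUILD.gn: one group per SDK type wrapping per-OS zip targets, plus an oh-uni-package.json for each type.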
+ template = Template("""#Generated code, DONOT modify it.
+import("//build/ohos/build_var.gni")
+import("//build/ohos/sdk/sdk.gni")
+import("${build_configs_path}/platforms_list.gni")
+import("{{ generated_sdk_modules_gni }}")
+
+{% for sdk_type in sdk_types %}
+ {% set _sdk_type = sdk_type.replace('-', '_') %}
+ if (defined({{ ohos_sdk_modules }}.{{ _sdk_type }})) {
+ {{ _sdk_type }}s = {{ ohos_sdk_modules }}. {{ _sdk_type }}
+ group("{{ _sdk_type }}") {
+ public_deps = []
+ {% for os in sdk_systems %}
+ public_deps += [":{{ _sdk_type }}_{{ os }}"]
+ {% endfor %}
+ }
+
+ {% for os in sdk_systems %}
+ make_{{ os }}_sdk_modules("{{ _sdk_type }}_{{ os }}") {
+ sdk_type = "{{ sdk_type }}"
+ sdk_modules = {{ _sdk_type }}s.{{ os }}
+ {% if release_type != "" %}
+ zipfile_name =
+ "${sdk_type}-${{ "{" }}sdk_system_{{ os }}{{ "}" }}-${current_sdk_version}-${release_type}.zip"
+ {% else %}
+ zipfile_name =
+ "${sdk_type}-${{ "{" }}sdk_system_{{ os }}{{ "}" }}-${current_sdk_version}.zip"
+ {% endif %}
+ }
+ {% endfor %}
+ }
+{% endfor %}
+
+foreach(os, sdk_systems) {
+ {% for sdk_type in sdk_types %}
+ {% if display_name.get(sdk_type) %}
+ {% set _display_name = display_name.get(sdk_type) %}
+ {% else %}
+ {% set _display_name = sdk_type.capitalize() %}
+ {% endif %}
+ package_info_file =
+ "$ohos_sdk_copy_dir/$os/{{ sdk_type }}/oh-uni-package.json"
+ package_info = {}
+ package_info = {
+ path = "{{ sdk_type }}"
+ displayName = "{{ _display_name }}"
+ version = current_sdk_version
+
+ {% if release_type != "" %}
+ releaseType = "{{ release_type }}"
+ {% endif %}
+
+ {% if meta_version != "" %}
+ meta = {
+ metaVersion = "{{ meta_version }}"
+ }
+ {% endif %}
+
+ {% if sdk_type != "toolchains" %}
+ apiVersion = "{{ api_version }}"
+ {% endif %}
+ }
+ write_file(package_info_file, package_info, "json")
+ {% endfor %}
+}
+
+group("generated_ohos_sdk") {
+ public_deps = []
+ {% for sdk_type in sdk_types %}
+ {% set _sdk_type = sdk_type.replace('-', '_') %}
+ if (defined({{ ohos_sdk_modules }}.{{ _sdk_type }})) {
+ public_deps += [ ":{{ _sdk_type }}" ]
+ }
+ {% endfor %}
+ public_deps += {{ ohos_sdk_modules }}.extras
+}""",
+ trim_blocks=True,
+ lstrip_blocks=True)
+
+ contents = template.render(
+ ohos_sdk_modules=options.sdk_modules,
+ sdk_types=options.sdk_types,
+ current_dir=options.current_dir,
+ sdk_systems=options.sdk_systems,
+ display_name=read_display_name(options.type2displayname),
+ api_version=options.api_version,
+ release_type=options.release_type,
+ meta_version=options.meta_version,
+ generated_sdk_modules_gni=options.generated_sdk_modules_gni)
+ write_file(options.output, contents)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/sdk/ohos_sdk_description_std.json b/dsoftbus/build/ohos/sdk/ohos_sdk_description_std.json
new file mode 100755
index 0000000000000000000000000000000000000000..b9a1c703aeb3f8fc3dff8397387a8c05229c992d
--- /dev/null
+++ b/dsoftbus/build/ohos/sdk/ohos_sdk_description_std.json
@@ -0,0 +1,360 @@
+[
+ {
+ "install_dir": "toolchains",
+ "module_label": "//developtools/hdc_standard:hdc_std",
+ "target_os": [
+ "linux",
+ "windows",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "toolchains",
+ "module_label": "//developtools/hdc:hdc",
+ "target_os": [
+ "linux"
+ ]
+ },
+ {
+ "install_dir": "toolchains",
+ "module_label": "//developtools/hdc:hdc_win",
+ "target_os": [
+ "windows"
+ ]
+ },
+ {
+ "install_dir": "toolchains",
+ "module_label": "//developtools/hdc:hdc_darwin",
+ "target_os": [
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "toolchains",
+ "module_label": "//developtools/hdc:hdc_lib_winapi",
+ "target_os": [
+ "windows"
+ ]
+ },
+ {
+ "install_dir": "toolchains",
+ "module_label": "//developtools/hdc:hdc_lib_winusbapi",
+ "target_os": [
+ "windows"
+ ]
+ },
+ {
+ "install_dir": "toolchains",
+ "module_label": "//prebuilts/signcenter:lib",
+ "target_os": [
+ "linux",
+ "windows",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "toolchains",
+ "module_label": "//prebuilts/build-tools/common/restool:restool_linux",
+ "target_os": [
+ "linux"
+ ]
+ },
+ {
+ "install_dir": "toolchains",
+ "module_label": "//prebuilts/build-tools/common/restool:restool_windows",
+ "target_os": [
+ "windows"
+ ]
+ },
+ {
+ "install_dir": "toolchains",
+ "module_label": "//prebuilts/build-tools/common/restool:restool_darwin",
+ "target_os": [
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "js/api",
+ "module_label": "//interface/sdk-js:ace_platform",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "js/api/phone",
+ "module_label": "//interface/sdk-js:ohos_declaration_phone",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "js/form",
+ "module_label": "//interface/sdk-js:form_declaration",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "js/build-tools/",
+ "module_label": "//prebuilts/sdk/js-loader:ace_loader",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "js/build-tools/ace-loader/bin/",
+ "module_label": "//third_party/quickjs:qjsc",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "js/build-tools/ace-loader/bin/",
+ "module_label": "//third_party/jerryscript:jerry",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "js/build-tools/ace-loader/bin/",
+ "module_label": "//third_party/jerryscript:jerry-snapshot",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "js/build-tools/ace-loader",
+ "module_label": "//developtools/ace-js2bundle:ace_loader",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "js/build-tools/ace-loader/lib/",
+ "module_label": "//developtools/ace-js2bundle:ace_loader_library",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "js/build-tools/ace-loader/bin/ark/",
+ "module_label": "//ark/ts2abc/ts2panda:panda_ts2abc",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "ets/build-tools/ets-loader/bin/ark/",
+ "module_label": "//ark/ts2abc/ts2panda:panda_ts2abc_ets",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "js/build-tools/ace-loader/bin/ark/build/",
+ "module_label": "//ark/ts2abc/ts2panda:ts2abc_build",
+ "target_os": [
+ "linux"
+ ]
+ },
+ {
+ "install_dir": "js/build-tools/ace-loader/bin/ark/build-win/",
+ "module_label": "//ark/ts2abc/ts2panda:ts2abc_build_win",
+ "target_os": [
+ "windows"
+ ]
+ },
+ {
+ "install_dir": "js/build-tools/ace-loader/bin/ark/build-mac/",
+ "module_label": "//ark/ts2abc/ts2panda:ts2abc_build_mac",
+ "target_os": [
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "ets/build-tools/ets-loader/bin/ark/build/",
+ "module_label": "//ark/ts2abc/ts2panda:ts2abc_build_ets",
+ "target_os": [
+ "linux"
+ ]
+ },
+ {
+ "install_dir": "ets/build-tools/ets-loader/bin/ark/build-win/",
+ "module_label": "//ark/ts2abc/ts2panda:ts2abc_build_win_ets",
+ "target_os": [
+ "windows"
+ ]
+ },
+ {
+ "install_dir": "ets/build-tools/ets-loader/bin/ark/build-mac/",
+ "module_label": "//ark/ts2abc/ts2panda:ts2abc_build_mac_ets",
+ "target_os": [
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "ets/api/common/@internal",
+ "module_label": "//interface/sdk-js:ets_internal_api",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "ets/api/common",
+ "module_label": "//interface/sdk-js:common_api",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "ets/api/common",
+ "module_label": "//interface/sdk-js:ohos_declaration_ets",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "ets/component",
+ "module_label": "//interface/sdk-js:ets_component",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "ets/build-tools/ets-loader",
+ "module_label": "//developtools/ace-ets2bundle:ets_loader",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "ets/build-tools/ets-loader/lib",
+ "module_label": "//developtools/ace-ets2bundle:ets_loader_library",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "ets/build-tools/ets-loader/declarations",
+ "module_label": "//developtools/ace-ets2bundle:ets_loader_declaration",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "toolchains",
+ "module_label": "//ark/runtime_core/assembler:ark_asm",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "toolchains",
+ "module_label": "//ark/runtime_core/disassembler:ark_disasm",
+ "target_os": [
+ "windows",
+ "linux",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "toolchains/lib",
+ "module_label": "//developtools/packing_tool/jar:app_packing_tool",
+ "target_os": [
+ "linux",
+ "windows",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "toolchains/lib",
+ "module_label": "//developtools/packing_tool/jar:app_unpacking_tool",
+ "target_os": [
+ "linux",
+ "windows",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "previewer",
+ "module_label": "//developtools/ace-js2bundle:previewer_copy",
+ "target_os": [
+ "windows",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "js/build-tools/binary-tools",
+ "module_label": "//developtools/packing_tool/jar:fastjson_utils_java",
+ "target_os": [
+ "linux",
+ "windows",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "js/build-tools/binary-tools",
+ "module_label": "//developtools/packing_tool/jar:haptobin_tool",
+ "target_os": [
+ "linux",
+ "windows",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "toolchains/configcheck",
+ "module_label": "//developtools/packing_tool/configcheck:configSchema_lite_json",
+ "target_os": [
+ "linux",
+ "windows",
+ "darwin"
+ ]
+ },
+ {
+ "install_dir": "toolchains/configcheck",
+ "module_label": "//developtools/packing_tool/configcheck:configSchema_rich_json",
+ "target_os": [
+ "linux",
+ "windows",
+ "darwin"
+ ]
+ }
+]
diff --git a/dsoftbus/build/ohos/sdk/parse_sdk_description.py b/dsoftbus/build/ohos/sdk/parse_sdk_description.py
new file mode 100755
index 0000000000000000000000000000000000000000..edcfa1255397fffd2966ba3ceb87c5761918cea4
--- /dev/null
+++ b/dsoftbus/build/ohos/sdk/parse_sdk_description.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+import os
+import pathlib
+import re
+
+sys.path.append(
+ os.path.dirname(
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
+from scripts.util.file_utils import write_file, read_json_file, \
+ write_json_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+_SOURCE_ROOT = os.path.abspath(
+ os.path.join(os.path.dirname(__file__), '..', '..', '..'))
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1, os.path.join(_SOURCE_ROOT, 'third_party'))
+from jinja2 import Template # pylint: disable=F0401
+
+KEYS = ['target_os', 'install_dir', 'module_label', 'build_only']
+
+
+class SdkTargets:
+
+ def __init__(self, os_type):
+ self.os_type = os_type
+ self.targets = []
+
+ def add_target(self, target):
+ if target not in self.targets:
+ self.targets.append(target)
+
+ def get_targets(self):
+ return self.targets
+
+
+def check_keys(keys):
+ for key in keys:
+ if key not in KEYS:
+ raise Exception(
+ 'Error: failed to parse ohos sdk description file, unsupported key {}.'
+ .format(key))
+
+
+def get_sdk_type(path_name):
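+ # The first component of install_dir names the SDK type, e.g. 'toolchains', 'js', 'ets'.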
+ p = pathlib.Path(path_name)
+ if path_name.startswith('/'):
+ top_dir = p.parts[1]
+ else:
+ top_dir = p.parts[0]
+ return top_dir
+
+
+def add_target(item, target, sdk_systems):
+ for _os in sdk_systems:
+ if _os == 'linux' or _os == 'Linux':
+ item.get('targets').get('linux').add_target('"%s",' % target)
+ elif _os == 'windows' or _os == 'Windows':
+ item.get('targets').get('windows').add_target('"%s",' % target)
+ elif _os == 'darwin' or _os == 'Darwin':
+ item.get('targets').get('darwin').add_target('"%s",' % target)
+
+
+def write_sdk_build_gni(sdk_targets, build_only_targets, gni):
+ template = Template(
+ """#Generated code, DONOT modify it.
+ ohos_sdk_modules = {
+ {% for item in sdk_targets %}
+
+ {% set sdk_type = item.get('type') %}
+ {% set targets = item.get('targets') %}
+ {% set systems = targets.keys() %}
+ {% set _sdk_type = sdk_type.replace('-', '_') %}
+
+ {{ _sdk_type }} = {
+ {% for os in systems %}
+ {{ os }} = [
+ {% for t in targets.get(os).get_targets() %}
+ {{ t }}
+ {% endfor %}
+ ]
+ {% endfor %}
+ }
+ {% endfor %}
+
+ extras = [
+ {% for t in build_only_targets%}
+ "{{ t }}",
+ {% endfor %}
+ ]
+ }
+ """,
+ trim_blocks=True,
+ lstrip_blocks=True)
+
+ contents = template.render(
+ sdk_targets=sdk_targets, build_only_targets=build_only_targets)
+ write_file(gni, contents)
+
+
+def get_build_gn(label):
+ match = re.search(r"(.*?):(.*?)", label)
+ if match:
+ gn = '{}/BUILD.gn'.format(match.group(1))
+ if gn.startswith("//"):
+ return gn[len("//"):]
+ else:
+ return gn
+ else:
+ raise Exception("failed to get BUILD.gn of {}".format(label))
+
+
+def variant_to_product(variant, options):
+ relations = read_json_file(options.variant_to_product)
+ if variant in relations.keys():
+ return relations.get(variant)
+ else:
+ raise Exception('Error: failed to read {} in {}'.format(
+ variant, options.variant_to_product))
+
+
+def expand_platform_targets(options, label, install_dir):
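+ # Labels may carry ${base}, ${platforms} or ${variant} placeholders; expand them into concrete label/install-dir pairs.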
+ base = options.base_platform
+ platforms = options.platforms
+ variant = list(set(platforms) - set([base]))
+
+ if label.find('${base}') != -1:
+ return [label.replace('${base}', base)], [install_dir]
+ elif label.find('${platforms}') != -1:
+ return [label.replace('${platforms}', p) for p in platforms], [
+ install_dir.replace('${platforms}',
+ variant_to_product(c, options))
+ for c in platforms
+ ]
+ elif label.find('${variant}') != -1:
+ return [label.replace('${variant}', c) for c in variant], [
+ install_dir.replace('${variant}', variant_to_product(c, options))
+ for c in variant
+ ]
+ else:
+ return [label], [install_dir]
+
+
+def parse_description_file(options):
+ data = read_json_file(options.sdk_description_file)
+ if data is None:
+ raise Exception("read file '{}' failed.".format(
+ options.sdk_description_file))
+
+ module_install_infos = []
+ sdk_types = []
+ sdk_targets = []
+ build_only_targets = []
+
+ for d in data:
+ check_keys(d.keys())
+
+ label = d.get('module_label')
+ install_dir = d.get('install_dir')
+ build_only = d.get('build_only')
+
+ # skip labels that we cannot find.
+ rebased_build_gn = build_utils.rebase_path(
+ get_build_gn(label), current_base=options.source_root_dir)
+ if not os.path.exists(rebased_build_gn):
+ continue
+
+ if build_only:
+ build_only_targets.append(label)
+ continue
+
+ module_labels, install_dirs = expand_platform_targets(
+ options, label, install_dir)
+ target_os = d.get('target_os')
+
+ sdk_type = get_sdk_type(install_dir)
+ if sdk_type not in sdk_types:
+ sdk_targets.append({
+ 'type': sdk_type,
+ 'targets': {
+ 'linux': SdkTargets('linux'),
+ 'windows': SdkTargets('windows'),
+ 'darwin': SdkTargets('darwin')
+ }
+ })
+ sdk_types.append(sdk_type)
+ for item in sdk_targets:
+ if item['type'] == sdk_type:
+ for m in module_labels:
+ add_target(item, m, target_os)
+
+ for i in range(len(module_labels)):
+ install_info = {
+ 'label': module_labels[i],
+ 'install_dir': install_dirs[i]
+ }
+ module_install_infos.append(install_info)
+
+ return {
+ "sdk_targets": sdk_targets,
+ "install_infos": module_install_infos,
+ "sdk_types": sdk_types,
+ "build_only_targets": build_only_targets
+ }
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--sdk-description-file', required=True)
+ parser.add_argument('--sdk-install-info-file', required=True)
+ parser.add_argument('--sdk-modules-gni', required=True)
+ parser.add_argument('--sdk-types-file', required=True)
+ parser.add_argument('--base-platform', required=True)
+ parser.add_argument('--platforms', action='append', required=True)
+ parser.add_argument('--source-root-dir', required=True)
+ parser.add_argument('--variant-to-product', required=True)
+
+ options = parser.parse_args()
+
+ data = parse_description_file(options)
+
+ write_sdk_build_gni(
+ data.get('sdk_targets'), data.get('build_only_targets'),
+ options.sdk_modules_gni)
+ write_json_file(options.sdk_install_info_file, data.get('install_infos'))
+ with open(options.sdk_types_file, 'w') as f:
+ f.write('\n'.join(data.get('sdk_types')))
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/sdk/sdk.gni b/dsoftbus/build/ohos/sdk/sdk.gni
new file mode 100755
index 0000000000000000000000000000000000000000..1825fe7195cb60bcfdae67949eb19977753c1183
--- /dev/null
+++ b/dsoftbus/build/ohos/sdk/sdk.gni
@@ -0,0 +1,195 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("//build/ohos/build_var.gni")
+import("//build/ohos/notice/notice.gni")
+import("//build/version.gni")
+
+sdk_base_build_gn = "//build/ohos/sdk/BUILD.gn"
+generated_files_dir = get_path_info(sdk_base_build_gn, "gen_dir")
+generated_sdk_module_install_paths =
+ "${generated_files_dir}/ohos_sdk_install_paths.json"
+
+sdk_system_windows = "windows"
+sdk_system_linux = "linux"
+sdk_system_darwin = "darwin"
+
+ohos_sdk_out_dir = "$product_output_dir/ohos-sdk"
+ohos_sdk_copy_dir = "$root_build_dir/ohos-sdk"
+
+sdk_toolchains = {
+ linux = "//build/toolchain/linux:clang_x64"
+ windows = "//build/toolchain/mingw:mingw_x86_64"
+ darwin = "//build/toolchain/mac:clang_x64"
+}
+
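+# macOS hosts build only the darwin SDK; other hosts build both the windows (cross-built with MinGW) and linux SDKs.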
+if (host_os == "mac") {
+ sdk_systems = [ sdk_system_darwin ]
+} else {
+ sdk_systems = [
+ sdk_system_windows,
+ sdk_system_linux,
+ ]
+}
+
+template("copy_and_archive") {
+ assert(defined(invoker.dest_dir))
+ assert(defined(invoker.sdk_system))
+ assert(defined(invoker.sdk_type))
+ assert(defined(invoker.sdk_modules_desc_file))
+ forward_variables_from(invoker, [ "testonly" ])
+
+ action_with_pydeps(target_name) {
+ deps = []
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+
+ script = "//build/ohos/sdk/copy_sdk_modules.py"
+ depfile = "$target_gen_dir/$target_name.d"
+
+ _sdk_output_archive =
+ "$ohos_sdk_out_dir/${invoker.sdk_system}/${invoker.zipfile_name}"
+ _notice_output_archive = "${sdk_notice_archive_dir}/${invoker.sdk_system}-${invoker.sdk_type}.zip"
+ outputs = [
+ _sdk_output_archive,
+ _notice_output_archive,
+ ]
+
+ args = [
+ "--sdk-modules-desc-file",
+ rebase_path(invoker.sdk_modules_desc_file, root_build_dir),
+ "--sdk-archive-paths-file",
+ rebase_path(generated_sdk_module_install_paths, root_build_dir),
+ "--dest-dir",
+ rebase_path(invoker.dest_dir, root_build_dir),
+ "--sdk-output-archive",
+ rebase_path(_sdk_output_archive, root_build_dir),
+ "--notice-output-archive",
+ rebase_path(_notice_output_archive, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--archive-dir",
+ rebase_path("${invoker.dest_dir}/${invoker.sdk_type}", root_build_dir),
+ ]
+
+ if (defined(invoker.zip_prefix_path)) {
+ args += [
+ "--zip-prefix-path",
+ invoker.zip_prefix_path,
+ ]
+ }
+ }
+}
+
+template("make_sdk_modules") {
+ assert(defined(invoker.zipfile_name))
+ assert(defined(invoker.sdk_modules))
+ assert(defined(invoker.sdk_toolchain))
+ assert(defined(invoker.sdk_type))
+ assert(defined(invoker.sdk_system))
+
+ if (invoker.sdk_modules == []) {
+ not_needed(invoker, [ "sdk_toolchain" ])
+ }
+ copy_and_archive(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "sdk_system",
+ "sdk_type",
+ "zipfile_name",
+ ])
+ _sdk_modules = []
+ _sdk_module_infos = []
+
+ foreach(_label, invoker.sdk_modules) {
+ _target_label = get_label_info(_label, "label_no_toolchain")
+ sources = [ _target_label ]
+ if (sources == []) {
+ _sdk_modules += [ _target_label ]
+ } else {
+ _sdk_modules += [ "${_target_label}(${invoker.sdk_toolchain})" ]
+ }
+ sources = []
+ set_sources_assignment_filter([])
+ }
+ not_needed(invoker, [ "sdk_toolchain" ])
+
+ foreach(_label, _sdk_modules) {
+ _module_info_file = get_label_info(_label, "target_out_dir") + "/" +
+ get_label_info(_label, "name") + "_module_info.json"
+ _sdk_module_infos += [
+ {
+ label = get_label_info(_label, "label_no_toolchain")
+ module_info_file = rebase_path(_module_info_file, root_build_dir)
+ },
+ ]
+ }
+ sdk_modules_desc_file = "${target_gen_dir}/${target_name}_sdk_modules.json"
+ write_file(sdk_modules_desc_file, _sdk_module_infos, "json")
+
+ deps = _sdk_modules
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ dest_dir = "${ohos_sdk_copy_dir}/${sdk_system}"
+ zip_prefix_path = "${invoker.sdk_type}"
+ }
+}
+
+template("make_linux_sdk_modules") {
+ make_sdk_modules(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "zipfile_name",
+ "sdk_modules",
+ "sdk_type",
+ "deps",
+ ])
+ sdk_toolchain = sdk_toolchains.linux
+ sdk_system = sdk_system_linux
+ }
+}
+
+template("make_windows_sdk_modules") {
+ make_sdk_modules(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "zipfile_name",
+ "sdk_modules",
+ "sdk_type",
+ "deps",
+ ])
+ sdk_toolchain = sdk_toolchains.windows
+ sdk_system = sdk_system_windows
+ }
+}
+
+template("make_darwin_sdk_modules") {
+ make_sdk_modules(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "zipfile_name",
+ "sdk_modules",
+ "sdk_type",
+ "deps",
+ ])
+ sdk_toolchain = sdk_toolchains.darwin
+ sdk_system = sdk_system_darwin
+ }
+}
diff --git a/dsoftbus/build/ohos/sdk/type_to_display_name.json b/dsoftbus/build/ohos/sdk/type_to_display_name.json
new file mode 100644
index 0000000000000000000000000000000000000000..283bfad6238828181f8b05ec61aaae9ce1704994
--- /dev/null
+++ b/dsoftbus/build/ohos/sdk/type_to_display_name.json
@@ -0,0 +1,5 @@
+{
+ "toolchains": "Toolchains",
+ "js": "Js",
+ "java": "Java"
+}
diff --git a/dsoftbus/build/ohos/sdk/variant_to_product.json b/dsoftbus/build/ohos/sdk/variant_to_product.json
new file mode 100644
index 0000000000000000000000000000000000000000..1cbed94b480cc5372d29b71f23b47dcd03f3754a
--- /dev/null
+++ b/dsoftbus/build/ohos/sdk/variant_to_product.json
@@ -0,0 +1,6 @@
+{
+ "ivi": "car",
+ "wearable": "wearable",
+ "phone": "default",
+ "intellitv": "tv"
+}
diff --git a/dsoftbus/build/ohos/statistics/build_overlap_statistics.py b/dsoftbus/build/ohos/statistics/build_overlap_statistics.py
new file mode 100755
index 0000000000000000000000000000000000000000..c253ebdad4319736016fdb261e12d08c63a04863
--- /dev/null
+++ b/dsoftbus/build/ohos/statistics/build_overlap_statistics.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import fnmatch
+import argparse
+import json
+
+
+def is_combine_jars(contents):
+ return bool(contents.get('deps_info').get('combine_target'))
+
+
+def get_sources_file(build_config):
+ if not fnmatch.fnmatch(build_config, "*java*.build_config"):
+ return None
+ with open(build_config, 'r') as config_f:
+ contents = json.load(config_f)
+ if is_combine_jars(contents):
+ return None
+ return contents.get('deps_info').get('java_sources_file')
+
+
+def read_file(sources_file):
+ contents = []
+ if not os.path.exists(sources_file):
+ return []
+ with open(sources_file, 'r') as this:
+ contents = [line.strip() for line in this.readlines()]
+ return contents
+
+
+def get_subsystem_paths(subsystem_config_file, root_dir):
+ paths = {
+ 'common': 'build',
+ 'third_party': 'third_party',
+ 'test': 'test',
+ 'mcl': 'mcl'
+ }
+ with open(subsystem_config_file, 'r') as jfile:
+ data = json.load(jfile)
+ for key in data.keys():
+ path = data.get(key).get('path')
+ if os.path.exists(os.path.join(root_dir, path)):
+ paths[key] = path
+
+ return paths
+
+
+def overlap_rate_key(element):
+ return element['overlap_rate']
+
+
+def compute_overlap_rate_by_subsystem(options, paths, program_language):
+ objs = []
+ if program_language == 'c':
+ pattern = '*.o'
+ if program_language == 'java':
+ pattern = '*.build_config'
+ for root, _, files in os.walk(options.build_out_dir):
+ for file in fnmatch.filter(files, pattern):
+ if program_language == 'c':
+ splits = os.path.join(root, file).split('/obj/')
+ obj = ''.join(splits[1:])
+ if obj == '':
+ continue
+ if obj.find('/gen/') != -1:
+ splits = obj.split('/gen/')
+ obj = ''.join(splits[1:])
+ objs.append(obj)
+ if program_language == 'java':
+ sources_file = get_sources_file(os.path.join(root, file))
+ if not sources_file:
+ continue
+ for java_file in read_file(
+ os.path.join(options.build_out_dir, sources_file)):
+ if fnmatch.fnmatch(java_file, "*/generated_java/*"):
+ continue
+ objs.append(java_file)
+
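+ # Overlap rate is total compilations over unique files; values above 1.0 mean the same source is built more than once.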
+ total_builds = len(objs)
+ total_files = len(set(objs))
+ if total_builds == 0 or total_files == 0:
+ return
+
+ statistics = []
+ for subsystem in sorted(paths.keys()):
+ path = paths.get(subsystem)
+ if program_language == 'c':
+ pattern = '{}*'.format(path)
+ if program_language == 'java':
+ pattern = '../../{}*'.format(path)
+
+ sub_objs = []
+ for obj in fnmatch.filter(objs, pattern):
+ sub_objs.append(obj)
+ builds = len(sub_objs)
+ files = len(set(sub_objs))
+ if files == 0:
+ continue
+ overlap_rate = float(builds) / float(files)
+ sub_stat = {
+ "builds": builds,
+ "builds_percentage": 100 * float(builds) / float(total_builds),
+ "files": files,
+ "files_percentage": 100 * float(files) / float(total_files),
+ "overlap_rate": overlap_rate,
+ "subsystem": subsystem,
+ }
+ statistics.append(sub_stat)
+ print('{} targets overlap rate statistics'.format(program_language))
+ print('{:16}\t{:8}\t{:5}\t{:8}\t{:5}\t{:4}'.format(
+ 'subsystem', 'files NO.', 'percentage', 'builds NO.', 'percentage',
+ 'overlap rate'))
+
+ for item in sorted(statistics, key=overlap_rate_key, reverse=True):
+ print('{:16}\t{:8}\t{:2.1f}%\t{:8}\t{:2.1f}%\t{:.2f}'.format(
+ item.get('subsystem'), item.get('files'),
+ item.get('files_percentage'), item.get('builds'),
+ item.get('builds_percentage'), item.get('overlap_rate')))
+ print('\n{} overall build overlap rate: {:.2f}\n\n'.format(
+ program_language,
+ float(total_builds) / float(total_files)))
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--build-out-dir', help='base directory to analyze.')
+ parser.add_argument('--root-source-dir', help='source root directory.')
+ parser.add_argument(
+ '--subsystem-config-file', help='path to subsystem java targets.')
+
+ options = parser.parse_args()
+
+ paths = get_subsystem_paths(options.subsystem_config_file,
+ os.path.realpath(options.root_source_dir))
+ compute_overlap_rate_by_subsystem(options, paths, 'c')
+ compute_overlap_rate_by_subsystem(options, paths, 'java')
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/testfwk/gen_module_list_files.py b/dsoftbus/build/ohos/testfwk/gen_module_list_files.py
new file mode 100755
index 0000000000000000000000000000000000000000..29a33d9a09bd8c261a76525dd174d89d59f0b09a
--- /dev/null
+++ b/dsoftbus/build/ohos/testfwk/gen_module_list_files.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import optparse
+import os
+import sys
+import json
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util import build_utils # noqa: E402
+
+
+def _parse_args(args):
+ parser = optparse.OptionParser()
+ build_utils.add_depfile_option(parser)
+ parser.add_option('--output_dir', help='output directory')
+ parser.add_option('--source_dir', help='source directory')
+ parser.add_option('--target', help='name of target')
+ parser.add_option('--target_label')
+ parser.add_option('--test_type')
+ parser.add_option('--module_list_file', help='file name of module list')
+ options, _ = parser.parse_args(args)
+ build_utils.check_options(options,
+ parser,
+ required=('output_dir', 'target', 'source_dir',
+ 'target_label', 'module_list_file'))
+ return options, parser
+
+
+def main(args):
+ options, _ = _parse_args(args)
+ if not os.path.exists(os.path.dirname(options.module_list_file)):
+ os.makedirs(os.path.dirname(options.module_list_file), exist_ok=True)
+ with open(options.module_list_file, 'w') as f:
+ contents = json.dumps([{
+ 'target': options.target,
+ 'label': options.target_label,
+ 'source_directory': options.source_dir,
+ 'output_directory': options.output_dir,
+ 'test_type': options.test_type
+ }])
+ f.write(contents)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/ohos/testfwk/test_py_file_copy.py b/dsoftbus/build/ohos/testfwk/test_py_file_copy.py
new file mode 100755
index 0000000000000000000000000000000000000000..f769565872768cac4deeb727103ba7b486c21b1d
--- /dev/null
+++ b/dsoftbus/build/ohos/testfwk/test_py_file_copy.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+import shutil
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import write_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+
+def copy_file(target_base_dir, source_files, output_dir):
+ output_files = []
+ for src_file in source_files:
+ if not os.path.exists(src_file):
+ raise Exception("src file '{}' doesn't exist.".format(src_file))
+ relative_path = os.path.relpath(src_file, target_base_dir)
+ output_file = os.path.join(output_dir, relative_path)
+ dest_dir = os.path.dirname(output_file)
+ if not os.path.exists(dest_dir):
+ os.makedirs(dest_dir, exist_ok=True)
+ shutil.copy2(src_file, output_file)
+ output_files.append(output_file)
+ return output_files
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--target-base-dir', required=True)
+ parser.add_argument('--source-files', nargs='+', required=True)
+ parser.add_argument('--copy-output-dir', required=True)
+ parser.add_argument('--outfile', required=True)
+ parser.add_argument('--depfile', required=False)
+ args = parser.parse_args()
+
+ copy_out_list = copy_file(args.target_base_dir, args.source_files,
+ args.copy_output_dir)
+ write_file(args.outfile, '\n'.join(copy_out_list))
+
+ if args.depfile:
+ # sorted() returns the sorted list; list.sort() sorts in place and returns None.
+ _dep_files = sorted(args.source_files + copy_out_list)
+ build_utils.write_depfile(args.depfile,
+ args.outfile,
+ _dep_files,
+ add_pydeps=False)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/testfwk/test_py_file_copy.pydeps b/dsoftbus/build/ohos/testfwk/test_py_file_copy.pydeps
new file mode 100755
index 0000000000000000000000000000000000000000..45be270185b48e5fdf9e94edb3e0d2f16fb51c8a
--- /dev/null
+++ b/dsoftbus/build/ohos/testfwk/test_py_file_copy.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/testfwk --output build/ohos/testfwk/test_py_file_copy.pydeps build/ohos/testfwk/test_py_file_copy.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+test_py_file_copy.py
diff --git a/dsoftbus/build/ohos/testfwk/testcase_resource_copy.py b/dsoftbus/build/ohos/testfwk/testcase_resource_copy.py
new file mode 100755
index 0000000000000000000000000000000000000000..254945d6008d0ce0ed19053df111931849dcb52a
--- /dev/null
+++ b/dsoftbus/build/ohos/testfwk/testcase_resource_copy.py
@@ -0,0 +1,308 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+import shutil
+import xml.etree.ElementTree as ET
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+
+def copy_dir(src, dest):
+ if not os.path.exists(src):
+ raise Exception("src dir '{}' doesn't exist.".format(src))
+ if not os.path.exists(dest):
+ os.makedirs(dest, exist_ok=True)
+ result_files = []
+ src_files = []
+ for root, _, files in os.walk(src):
+ for _file in files:
+ file_path = os.path.join(root, _file)
+ src_files.append(file_path)
+ for src_path in src_files:
+ if os.path.islink(src_path):
+ continue
+ file_relpath = os.path.relpath(src_path, src)
+ dest_path = os.path.join(dest, file_relpath)
+ dest_dir = os.path.dirname(dest_path)
+ if not os.path.exists(dest_dir):
+ os.makedirs(dest_dir, exist_ok=True)
+ shutil.copy2(src_path, dest_path)
+ result_files.append(src_path)
+ result_files.append(dest_path)
+ return result_files
+
+
+def _resources_with_xml_v1(root, testcase_target_name, test_resource_path,
+ part_build_out_path, resource_output_path):
+ _out_resources_list = []
+ for target in root:
+ if target.attrib.get('name') != testcase_target_name:
+ continue
+ for _depend in target:
+ _findpath = _depend.attrib.get('findpath')
+ _resource_file = _depend.attrib.get('resource')
+ if _findpath == 'res':
+ _resource_src = os.path.join(test_resource_path,
+ _resource_file)
+ _res_dest = os.path.join(resource_output_path, _resource_file)
+ elif _findpath == 'out':
+ if not os.path.exists(_resource_file):
+ __dir_name = _resource_file.split('/')[0]
+ _resource_file_new = os.path.join(__dir_name,
+ _resource_file)
+ _resource_src_new = os.path.join(part_build_out_path,
+ _resource_file_new)
+ if os.path.exists(_resource_src_new):
+ _resource_src = _resource_src_new
+ _res_dest = os.path.join(resource_output_path,
+ _resource_file)
+ else:
+ _resource_src = ''
+ _res_dest = ''
+ else:
+ _resource_src = os.path.join(part_build_out_path,
+ _resource_file)
+ _res_dest = os.path.join(resource_output_path,
+ _resource_file)
+ else:
+ raise Exception(
+ "resource findpath type '{}' not support.".format(
+ _findpath))
+ if _resource_src:
+ _out_resources_list.append({
+ "src":
+ os.path.relpath(_resource_src),
+ "dest":
+ os.path.relpath(_res_dest)
+ })
+ return _out_resources_list
+
+
+def _parse_res_value(value):
+ res_file = value.split('->')[0].strip()
+ return res_file
+
+
+def _resources_with_xml_v2(root, testcase_target_name, test_resource_path,
+ part_build_out_path, resource_output_path):
+ _out_resources_list = []
+ for target in root:
+ if target.attrib.get('name') != testcase_target_name:
+ continue
+ for child in target:
+ if child.tag != 'preparer':
+ continue
+ for _option in child:
+ if _option.attrib.get('name') != 'push':
+ continue
+ _src_type = _option.attrib.get('src')
+ _resource_file_val = _option.attrib.get('value')
+ _resource_file = _parse_res_value(_resource_file_val)
+ if _src_type == 'res':
+ _resource_src = os.path.join(test_resource_path,
+ _resource_file)
+ _res_dest = os.path.join(resource_output_path,
+ _resource_file)
+ elif _src_type == 'out':
+ _resource_src = os.path.join(part_build_out_path,
+ _resource_file)
+ _res_dest = os.path.join(resource_output_path,
+ _resource_file)
+ else:
+ raise Exception(
+ "resource src type '{}' not support.".format(
+ _src_type))
+ if _resource_src:
+ _out_resources_list.append({
+ "src":
+ os.path.relpath(_resource_src),
+ "dest":
+ os.path.relpath(_res_dest)
+ })
+ return _out_resources_list
+
+
+def find_testcase_resources(resource_config_file, testcase_target_name,
+ test_resource_path, part_build_out_path,
+ resource_output_path):
+ if not os.path.exists(resource_config_file):
+ return []
+ tree = ET.parse(resource_config_file)
+ root = tree.getroot()
+ if root.attrib.get('ver') == '2.0':
+ _resources_list = _resources_with_xml_v2(root, testcase_target_name,
+ test_resource_path,
+ part_build_out_path,
+ resource_output_path)
+ else:
+ _resources_list = _resources_with_xml_v1(root, testcase_target_name,
+ test_resource_path,
+ part_build_out_path,
+ resource_output_path)
+ # copy ohos_test.xml
+ _resources_list.append({
+ "src":
+ resource_config_file,
+ "dest":
+ os.path.join(resource_output_path,
+ os.path.basename(resource_config_file))
+ })
+ return _resources_list
+
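+# The returned list maps each resource to its copy destination, roughly
+#   [{"src": "res/foo.cfg", "dest": "out/.../foo.cfg"}, ...]
+# (paths are hypothetical); the config XML itself is appended as the last entry.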
+
+def copy_testcase_resources(resource_infos):
+ result_dest_list = []
+ for resource_info in resource_infos:
+ src_file = resource_info.get('src')
+ if not os.path.exists(src_file):
+ print("warning: testcase resource {} doesn't exist.".format(
+ src_file))
+ # skip the missing resource instead of aborting the remaining copies
+ continue

+ dest_file = resource_info.get('dest')
+ dest_dir = os.path.dirname(dest_file)
+ if os.path.isdir(src_file):
+ result_files = copy_dir(src_file, dest_file)
+ result_dest_list.extend(result_files)
+ else:
+ if not os.path.exists(dest_dir):
+ os.makedirs(dest_dir, exist_ok=True)
+ shutil.copy2(src_file, dest_file)
+ if src_file:
+ result_dest_list.append(src_file)
+ result_dest_list.append(dest_file)
+ return result_dest_list
+
+
+def _get_subsystem_name(part_name):
+ subsystem_parts_file = 'build_configs/parts_info/subsystem_parts.json'
+ subsystem_parts_info = read_json_file(subsystem_parts_file)
+ if subsystem_parts_info is None:
+ raise Exception("read file '{}' failed.".format(subsystem_parts_file))
+ for name, p_list in subsystem_parts_info.items():
+ if part_name in p_list:
+ return name
+ return None
+
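+# subsystem_parts.json is assumed to map subsystem names to part lists, e.g.
+#   {"communication": ["dsoftbus", ...]}  (illustrative content)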
+
+def _get_subsystem_path(part_name):
+ subsystem_name = _get_subsystem_name(part_name)
+ if subsystem_name is None:
+ return None
+ subsystem_build_config_file = os.path.join('build_configs/subsystem_info',
+ 'subsystem_build_config.json')
+ config_info = read_json_file(subsystem_build_config_file)
+ if config_info is None:
+ raise Exception(
+ "read file '{}' failed.".format(subsystem_build_config_file))
+ subsystem_infos = config_info.get('subsystem')
+ info = subsystem_infos.get(subsystem_name)
+ if info is None:
+ raise Exception(
+ "subsystem '{}' info doesn't exist.".format(subsystem_name))
+ subsystem_path = info.get('path')
+ return subsystem_path
+
+
+def _parse_module_out_path(module_out_path):
+ split_re = module_out_path.split('/', 1)
+ part_name = split_re[0]
+ module_name = split_re[1]
+ return part_name, module_name
+
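+# Example: 'dsoftbus/unittest/foo' -> ('dsoftbus', 'unittest/foo')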
+
+def _find_resource_config_file(config_file_name, subsystem_path, module_name):
+ resource_config_file = os.path.join('../../', subsystem_path,
+ 'test/resource', module_name,
+ config_file_name)
+ # compatibility
+ if not os.path.exists(resource_config_file):
+ module_dirs = module_name.split('/')
+ _dirs_num = len(module_dirs)
+ _dir_name = os.path.dirname(resource_config_file)
+ while _dirs_num > 1:
+ _dir_name = os.path.dirname(_dir_name)
+ resource_config_file = os.path.join(_dir_name, config_file_name)
+ if os.path.exists(resource_config_file):
+ break
+ _dirs_num -= 1
+ return resource_config_file
+
+
+def _get_res_config_file(module_out_path):
+ part_name, module_name = _parse_module_out_path(module_out_path)
+ subsystem_path = _get_subsystem_path(part_name)
+ if subsystem_path is None:
+ return None
+ resource_config_file = _find_resource_config_file('ohos_test.xml',
+ subsystem_path,
+ module_name)
+ if not os.path.exists(resource_config_file):
+ resource_config_file = _find_resource_config_file(
+ 'harmony_test.xml', subsystem_path, module_name)
+ return resource_config_file
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--resource-config-file', required=False)
+ parser.add_argument('--testcase-target-name', required=True)
+ parser.add_argument('--part-build-out-path', required=True)
+ parser.add_argument('--resource-output-path', required=True)
+ parser.add_argument('--module-out-path', required=False)
+ parser.add_argument('--output-file', required=True)
+ parser.add_argument('--depfile', required=False)
+ args = parser.parse_args()
+ if not args.resource_config_file:
+ if not args.module_out_path:
+ raise Exception('Missing parameter module_out_path.')
+ resource_config_file = _get_res_config_file(args.module_out_path)
+ if resource_config_file is None:
+ print("warning: cannot find resource config file, target: '{}'".
+ format(args.testcase_target_name))
+ return 0
+ if not os.path.exists(resource_config_file):
+ return 0
+ else:
+ resource_config_file = args.resource_config_file
+ if not os.path.exists(resource_config_file):
+ raise Exception(
+ "testcase '{}' resource_config_file config incorrect.".format(
+ args.testcase_target_name))
+
+ test_resource_path = os.path.dirname(resource_config_file)
+ resources_list = find_testcase_resources(resource_config_file,
+ args.testcase_target_name,
+ test_resource_path,
+ args.part_build_out_path,
+ args.resource_output_path)
+ write_json_file(args.output_file, resources_list)
+ result_dest_list = copy_testcase_resources(resources_list)
+ if args.depfile and result_dest_list:
+ result_dest_list.sort()
+ build_utils.write_depfile(args.depfile,
+ args.output_file,
+ result_dest_list,
+ add_pydeps=False)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/testfwk/testcase_resource_copy.pydeps b/dsoftbus/build/ohos/testfwk/testcase_resource_copy.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..8f3ceda925c1bfc561655f1fc1e9d8ecaff3c51a
--- /dev/null
+++ b/dsoftbus/build/ohos/testfwk/testcase_resource_copy.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos/testfwk --output build/ohos/testfwk/testcase_resource_copy.pydeps build/ohos/testfwk/testcase_resource_copy.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/file_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+testcase_resource_copy.py
diff --git a/dsoftbus/build/ohos/version.py b/dsoftbus/build/ohos/version.py
new file mode 100755
index 0000000000000000000000000000000000000000..53c8c619cd91e867176e4e3cdff267375f259011
--- /dev/null
+++ b/dsoftbus/build/ohos/version.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--version', required=True)
+
+ options = parser.parse_args(args)
+ versions = options.version.split(sep='.')
+ for ver in versions:
+ print(ver)
+
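+# Usage sketch: "--version 3.0.0" prints one component per line (3, 0, 0),
+# which a caller such as a build script can read back as a list.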
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/ohos/write_kits_description.py b/dsoftbus/build/ohos/write_kits_description.py
new file mode 100755
index 0000000000000000000000000000000000000000..aaff3d7e4adf043cfe3ab584f152a5a0b433cd4d
--- /dev/null
+++ b/dsoftbus/build/ohos/write_kits_description.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import sys
+import os
+import zipfile
+import json
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__))))
+from scripts.util import build_utils # noqa: E402
+
+
+def get_java_sources_file(build_config):
+ with open(build_config, 'r') as file:
+ data = json.load(file)
+ return data.get('deps_info').get('java_sources_file')
+
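+# build_config is assumed to be a JSON file shaped roughly like
+#   {"deps_info": {"java_sources_file": "path/to/sources"}}  (illustrative)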
+
+def main():
+ parser = argparse.ArgumentParser()
+ build_utils.add_depfile_option(parser)
+
+ parser.add_argument('--input', required=True)
+ parser.add_argument('--output', default=None)
+
+ options = parser.parse_args()
+
+ depfiles = []
+ with open(options.input, 'r') as file:
+ kits_info = json.load(file)
+ for module in kits_info:
+ if module.get('type') != 'jar':
+ continue
+ build_config = module.get('build_config')
+ depfiles.append(build_config)
+ module['source_list_file'] = get_java_sources_file(build_config)
+
+ build_utils.write_json(kits_info, options.output)
+
+ build_utils.write_depfile(options.depfile,
+ options.output,
+ depfiles,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/ohos/write_kits_description.pydeps b/dsoftbus/build/ohos/write_kits_description.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..cc3945ff75b6bc22ed010a2f05ac7d4a3bf21ffb
--- /dev/null
+++ b/dsoftbus/build/ohos/write_kits_description.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/ohos --output build/ohos/write_kits_description.pydeps build/ohos/write_kits_description.py
+../gn_helpers.py
+../scripts/__init__.py
+../scripts/util/__init__.py
+../scripts/util/build_utils.py
+../scripts/util/md5_check.py
+../scripts/util/pycache.py
+write_kits_description.py
diff --git a/dsoftbus/build/ohos_system.prop b/dsoftbus/build/ohos_system.prop
new file mode 100644
index 0000000000000000000000000000000000000000..06492f7fe4c72747cfc831ab4f912d75ed8634f1
--- /dev/null
+++ b/dsoftbus/build/ohos_system.prop
@@ -0,0 +1,27 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# whether ohos is enabled.
+hw_sc.build.os.enable=true
+
+# ohos API version number.
+hw_sc.build.os.apiversion=7
+
+# ohos system version.
+hw_sc.build.os.version=3.0.0
+
+# ohos device type: please use hw_sc.build.os.devicetype
+
+# ohos release type
+hw_sc.build.os.releasetype=Release
diff --git a/dsoftbus/build/ohos_var.gni b/dsoftbus/build/ohos_var.gni
new file mode 100755
index 0000000000000000000000000000000000000000..4930fd4d2d7c81b1bdc397de7611e359e953495e
--- /dev/null
+++ b/dsoftbus/build/ohos_var.gni
@@ -0,0 +1,202 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/version.gni")
+
+declare_args() {
+ # build ohos version
+ build_public_version = true
+}
+
+declare_args() {
+ # build system type
+ is_mini_system = false
+ is_small_system = false
+ is_standard_system = false
+}
+
+declare_args() {
+ is_large_system = !(is_standard_system || is_small_system || is_mini_system)
+}
+
+declare_args() {
+ enable_java = is_large_system
+}
+
+declare_args() {
+ build_version_alpha = true
+}
+
+declare_args() {
+ # Whether to build the qemu image; this image is used to run on an emulator device.
+ build_qemu_image = false
+
+ # system package dir
+ system_base_dir = "system"
+
+ # ramdisk package dir
+ ramdisk_base_dir = "root"
+
+ # vendor package dir
+ vendor_base_dir = "vendor"
+
+ # updater package dir
+ updater_base_dir = "updater"
+
+ # check sdk interface
+ sdk_interface_check = true
+
+ # whether to use the sdk
+ is_use_sdk = false
+
+ # build test example
+ build_example = false
+
+ # check innersdk interface
+ check_innersdk_interface = true
+ gen_innersdk_interface_signature = false
+
+ # sdk check
+ check_sdk_interface = true
+ gen_sdk_interface_signature = false
+
+ # Whether to collect the ebpf testcase.
+ ebpf_enable = false
+
+ # When set to true, source file paths are removed from the info files of test targets.
+ release_test_suite = false
+
+ # Whether to enable scalable build.
+ scalable_build = false
+
+ # Whether to enable pycache or not.
+ pycache_enable = true
+
+ # Whether to build the js bundle into ark bytecode
+ build_ark = false
+}
+
+declare_args() {
+ build_platform = ""
+
+ product_name = ""
+
+ sparse_image = false
+}
+
+declare_args() {
+ system_kits_package = false
+
+ kits_api_statistics = false
+
+ # ohos sdk
+ build_ohos_sdk = false
+
+ # Build ohos sdk compatibility jar
+ build_bc_sdk = false
+}
+
+declare_args() {
+ # Build ohos NDK
+ build_ohos_ndk = false
+
+ # Which OS the generated NDK works on. Possible values: 'win/mac/linux/default'.
+ ndk_system = "default"
+
+ # Whether to archive or not, set to false in debug to save ndk build time.
+ archive_ndk = true
+
+ # Check ndk header signature, set false to generate header signature
+ check_ndk_signature = false
+}
+
+declare_args() {
+ archive_component = false
+}
+
+declare_args() {
+ build_xts = false
+}
+
+declare_args() {
+ build_asan_clean = false
+}
+
+declare_args() {
+ check_deps = false
+}
+
+# build target type
+target_type = "${target_os}-${target_cpu}"
+
+parts_src_flag_file = "$root_build_dir/build_configs/parts_src_flag.json"
+
+innersdk_dir_name = "sdk/${target_type}"
+innersdk_base_dir = "//${innersdk_dir_name}"
+
+# Compile and generate output directory
+innersdk_build_out_dir = "innerkits/${target_type}"
+
+dist_dir_name = "component_dist/${target_type}"
+dist_build_out_dir = "${root_build_dir}/component_dist/${target_type}"
+dist_subsystem_info_filename = "dist_parts_info.json"
+
+# install packages archive dir define
+dist_install_packages_dir = "${dist_dir_name}/packages_to_install"
+dist_subsystem_info_file =
+ "//${dist_install_packages_dir}/${dist_subsystem_info_filename}"
+dist_build_install_packages_dir = "${dist_build_out_dir}/packages_to_install"
+dist_subsystem_info_output_file =
+ "${dist_build_install_packages_dir}/${dist_subsystem_info_filename}"
+
+ramdisk_dir_list = []
+
+product_build_config =
+ read_file("//out/build_configs/${product_name}/preloader/build_config.json",
+ "json")
+product_company = product_build_config.product_company
+device_name = product_build_config.device_name
+device_company = product_build_config.device_company
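+
+# The preloader build_config.json is expected to provide at least the three
+# fields read above, e.g. (illustrative):
+#   { "product_company": "...", "device_name": "...", "device_company": "..." }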
+
+app_target_allowlist = [
+ "*:*_res",
+ "*:*_resources",
+ "*:*_resource",
+ "*:*_assets",
+ "*:*_asset",
+]
+app_target_types = [
+ "hap",
+ "resources",
+ "js_assets",
+ "assets",
+]
+
+restool = "//prebuilts/build-tools/common/restool/restool"
+hapsigner = "//prebuilts/signcenter/hapsigntool/hapsigntoolv2.jar"
+hap_unpacking_tool =
+ "//developtools/packing_tool/jar/hmos_app_unpacking_tool.jar"
+hap_packing_tool = "//developtools/packing_tool/jar/hmos_app_packing_tool.jar"
+ace_loader_home = "//prebuilts/sdk/js-loader/build-tools/ace-loader"
+nodejs_version = "v12.18.4"
+nodejs = "//prebuilts/build-tools/common/nodejs/node-${nodejs_version}-${host_os}-x64/bin/node"
+webpack_js = "${ace_loader_home}/node_modules/webpack/bin/webpack.js"
+webpack_config_js = "${ace_loader_home}/webpack.rich.config.js"
+default_hap_private_key_path = "OpenHarmony Application Release"
+default_signature_algorithm = "SHA256withECDSA"
+default_key_alias = "123456"
+default_keystore_password = "123456"
+default_keystore_path = "//prebuilts/signcenter/key/OpenHarmony.p12"
+default_hap_certificate_file =
+ "//prebuilts/signcenter/certificates/OpenHarmonyApplication.pem"
diff --git a/dsoftbus/build/prebuilts_download.sh b/dsoftbus/build/prebuilts_download.sh
new file mode 100755
index 0000000000000000000000000000000000000000..dbae1e43ebfc7e516b77b0c97b58e0b65de6f77b
--- /dev/null
+++ b/dsoftbus/build/prebuilts_download.sh
@@ -0,0 +1,261 @@
+#!/bin/bash
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+set -e
+for i in "$@"; do
+ case "$i" in
+ -skip-ssl|--skip-ssl) # Skip the SSL checks of wget and npm. If you use this option,
+ # attackers may tamper with or steal the data exchanged between client and server,
+ # putting your data security at risk!
+ SKIP_SSL=YES
+ ;;
+ esac
+done
+if [ "X${SKIP_SSL}" == "XYES" ];then
+ wget_ssl_check='--no-check-certificate'
+else
+ wget_ssl_check=''
+fi
+sha256_result=0
+check_sha256=''
+local_sha256=''
+function check_sha256(){
+ success_color='\033[1;42mSuccess\033[0m'
+ failed_color='\033[1;41mFailed\033[0m'
+ check_url=$1 # source URL
+ local_file=$2 # absolute path of the local file
+ check_sha256=$(curl -s -k ${check_url}.sha256) # Huawei Cloud is used for now and the URL layout is fixed, so it is hard-coded; update this if that ever changes
+ local_sha256=$(sha256sum ${local_file} |awk '{print $1}')
+ if [ "X${check_sha256}" == "X${local_sha256}" ];then
+ echo -e "${success_color},${check_url} Sha256 check OK."
+ sha256_result=0
+ else
+ echo -e "${failed_color},${check_url} Sha256 check Failed.Retry!"
+ sha256_result=1
+ #exit 1 # 默认退出,必须保证sha256一致,如有特殊需要,请自行注释
+ fi
+}
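+# Usage sketch (hypothetical URL):
+#   check_sha256 "https://host/pkg.tar.gz" "/tmp/pkg.tar.gz"
+# compares the digest fetched from <URL>.sha256 with the local file's
+# sha256sum and stores the outcome in ${sha256_result}.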
+function hwcloud_download(){
+ # Proxy without authentication: wget -t3 -T10 -O ${bin_dir} -e "https_proxy=http://domain.com:port" ${huaweicloud_url}
+ # Proxy with authentication (special characters in the username and password must be URL-escaped): wget -t3 -T10 -O ${bin_dir} -e "https_proxy=http://username:password@domain.com:port" ${huaweicloud_url}
+ # No proxy
+ download_local_file=$1
+ download_source_url=$2
+ for((i=1;i<=3;i++));
+ do
+ if [ -f "${download_local_file}" ];then
+ check_sha256 "${download_source_url}" "${download_local_file}"
+ if [ ${sha256_result} -gt 0 ];then
+ # Give the variable a default value to guard against deleting the wrong path
+ rm -rf "${download_local_file:-/tmp/20210721_not_exit_file}"
+ else
+ i=999
+ return 0
+ fi
+ fi
+ if [ ! -f "${download_local_file}" ];then
+ wget -t3 -T10 ${wget_ssl_check} -O "${download_local_file}" "${download_source_url}"
+ fi
+ done
+ # Report an error and exit after three consecutive failures
+ echo -e """Sha256 check failed!
+Download URL: ${download_source_url}
+Local file: ${download_local_file}
+Remote sha256: ${check_sha256}
+Local sha256: ${local_sha256}"""
+ exit 1
+}
+
+case $(uname -s) in
+ Linux)
+ host_platform=linux
+ ;;
+ Darwin)
+ host_platform=darwin
+ ;;
+ *)
+ echo "Unsupported host platform: $(uname -s)"
+ exit 1
+esac
+
+# Directory the code was downloaded to
+script_path=$(cd $(dirname $0);pwd)
+code_dir=$(dirname ${script_path})
+# Directory for the prebuilt binaries, used as temporary storage; about 7 GB of space is required
+# Downloaded archives are unpacked into the code directory automatically; the archives themselves are kept in this directory
+bin_dir=${code_dir}/../OpenHarmony_prebuilts_pkgs
+
+# Prebuilt binary mapping
+copy_config="""
+prebuilts/sdk/js-loader/build-tools,https://repo.huaweicloud.com/harmonyos/compiler/ace-loader/1.0/ace-loader-1.0.tar.gz
+prebuilts/build-tools/common,https://repo.huaweicloud.com/harmonyos/compiler/restool/1.023-d/restool.tar.gz
+prebuilts/cmake,https://repo.huaweicloud.com/harmonyos/compiler/cmake/3.16.5/${host_platform}/cmake-${host_platform}-x86-3.16.5.tar.gz
+prebuilts/build-tools/${host_platform}-x86/bin,https://repo.huaweicloud.com/harmonyos/compiler/gn/1717/${host_platform}/gn-${host_platform}-x86-1717.tar.gz
+prebuilts/build-tools/${host_platform}-x86/bin,https://repo.huaweicloud.com/harmonyos/compiler/ninja/1.10.1/${host_platform}/ninja-${host_platform}-x86-1.10.1.tar.gz
+prebuilts/python,https://repo.huaweicloud.com/harmonyos/compiler/python/3.8.5/${host_platform}/python-${host_platform}-x86-3.8.5.tar.gz
+prebuilts/clang/ohos/${host_platform}-x86_64,https://repo.huaweicloud.com/harmonyos/compiler/clang/10.0.1-73276/${host_platform}/clang-73276-release-${host_platform}-x86_64.tar.bz2
+"""
+
+if [[ "${host_platform}" == "linux" ]]; then
+ copy_config+="""
+ prebuilts/cmake,https://repo.huaweicloud.com/harmonyos/compiler/cmake/3.16.5/windows/cmake-windows-x86-3.16.5.tar.gz
+ prebuilts/mingw-w64/ohos/linux-x86_64,https://repo.huaweicloud.com/harmonyos/compiler/mingw-w64/7.0.0/clang-mingw.tar.gz
+ prebuilts/gcc/linux-x86/arm/gcc-linaro-7.5.0-arm-linux-gnueabi,https://repo.huaweicloud.com/harmonyos/compiler/prebuilts_gcc_linux-x86_arm_gcc-linaro-7.5.0-arm-linux-gnueabi/1.0/prebuilts_gcc_linux-x86_arm_gcc-linaro-7.5.0-arm-linux-gnueabi.tar.gz
+ prebuilts/gcc/linux-x86/aarch64,https://repo.huaweicloud.com/harmonyos/compiler/prebuilts_gcc_linux-x86_arm_gcc-linaro-7.5.0-arm-linux-gnueabi/1.0/gcc-linaro-7.5.0-2019.12-x86_64_aarch64-linux-gnu.tar.xz
+ prebuilts/previewer/windows,https://repo.huaweicloud.com/harmonyos/develop_tools/previewer/3.0.0.0/windows/previewer.tar.gz
+ """
+elif [[ "${host_platform}" == "darwin" ]]; then
+ copy_config+="""
+ prebuilts/previewer/darwin,https://repo.huaweicloud.com/harmonyos/develop_tools/previewer/3.0.0.0/darwin/previewer.tar.gz
+ """
+fi
+
+if [ ! -d "${bin_dir}" ];then
+ mkdir -p "${bin_dir}"
+fi
+
+for i in $(echo ${copy_config})
+do
+ unzip_dir=$(echo $i|awk -F ',' '{print $1}')
+ huaweicloud_url=$(echo $i|awk -F ',' '{print $2}')
+ md5_huaweicloud_url=$(echo ${huaweicloud_url}|md5sum|awk '{print $1}')
+ bin_file=$(basename ${huaweicloud_url})
+ bin_file_suffix=${bin_file#*.}
+ #huaweicloud_file_name=$(echo ${huaweicloud_url}|awk -F '/' '{print $NF}')
+
+ if [ ! -d "${code_dir}/${unzip_dir}" ];then
+ mkdir -p "${code_dir}/${unzip_dir}"
+ fi
+ hwcloud_download "${bin_dir}/${md5_huaweicloud_url}.${bin_file_suffix}" "${huaweicloud_url}"
+ if [ "X${bin_file_suffix:0-3}" = "Xzip" ];then
+ unzip "${bin_dir}/${md5_huaweicloud_url}.${bin_file_suffix}" -d "${code_dir}/${unzip_dir}/"
+ elif [ "X${bin_file_suffix:0-6}" = "Xtar.gz" ];then
+ tar -xvzf "${bin_dir}/${md5_huaweicloud_url}.${bin_file_suffix}" -C "${code_dir}/${unzip_dir}"
+ else
+ tar -xvf "${bin_dir}/${md5_huaweicloud_url}.${bin_file_suffix}" -C "${code_dir}/${unzip_dir}"
+ fi
+ # Some archives contain an extra top-level directory; the blocks below clean up that redundant directory
+ if [ -d "${code_dir}/prebuilts/gcc/linux-x86/arm/gcc-linaro-7.5.0-arm-linux-gnueabi/prebuilts_gcc_linux-x86_arm_gcc-linaro-7.5.0-arm-linux-gnueabi" ];then
+ mv "${code_dir}/prebuilts/gcc/linux-x86/arm/gcc-linaro-7.5.0-arm-linux-gnueabi/prebuilts_gcc_linux-x86_arm_gcc-linaro-7.5.0-arm-linux-gnueabi" "${code_dir}/prebuilts/gcc/linux-x86/arm/gcc-linaro-7.5.0-arm-linux-gnueabi2/"
+ rm -rf "${code_dir}/prebuilts/gcc/linux-x86/arm/gcc-linaro-7.5.0-arm-linux-gnueabi"
+ mv "${code_dir}/prebuilts/gcc/linux-x86/arm/gcc-linaro-7.5.0-arm-linux-gnueabi2/" "${code_dir}/prebuilts/gcc/linux-x86/arm/gcc-linaro-7.5.0-arm-linux-gnueabi/"
+ fi
+ if [ -d "${code_dir}/prebuilts/clang/ohos/linux-x86_64/clang-73276-release" ];then
+ rm -rf "${code_dir}/prebuilts/clang/ohos/linux-x86_64/llvm"
+ mv "${code_dir}/prebuilts/clang/ohos/linux-x86_64/clang-73276-release" "${code_dir}/prebuilts/clang/ohos/linux-x86_64/llvm"
+ ln -snf 10.0.1 "${code_dir}/prebuilts/clang/ohos/linux-x86_64/llvm/lib/clang/current"
+ fi
+ if [ -d "${code_dir}/prebuilts/clang/ohos/darwin-x86_64/clang-73276-release" ];then
+ rm -rf "${code_dir}/prebuilts/clang/ohos/darwin-x86_64/llvm"
+ mv "${code_dir}/prebuilts/clang/ohos/darwin-x86_64/clang-73276-release" "${code_dir}/prebuilts/clang/ohos/darwin-x86_64/llvm"
+ ln -snf 10.0.1 "${code_dir}/prebuilts/clang/ohos/darwin-x86_64/llvm/lib/clang/current"
+ fi
+done
+
+
+node_js_ver=v12.18.4
+node_js_name=node-${node_js_ver}-${host_platform}-x64
+node_js_pkg=${node_js_name}.tar.gz
+mkdir -p ${code_dir}/prebuilts/build-tools/common/nodejs
+cd ${code_dir}/prebuilts/build-tools/common/nodejs
+if [ ! -f "${node_js_pkg}" ]; then
+ wget -t3 -T10 ${wget_ssl_check} https://repo.huaweicloud.com/nodejs/${node_js_ver}/${node_js_pkg}
+ tar zxf ${node_js_pkg}
+fi
+
+if [ ! -d "${code_dir}/third_party/jsframework" ]; then
+ echo "${code_dir}/third_party/jsframework not exist, it shouldn't happen, pls check..."
+else
+ cd ${code_dir}/third_party/jsframework/
+ export PATH=${code_dir}/prebuilts/build-tools/common/nodejs/${node_js_name}/bin:$PATH
+ npm config set registry http://registry.npm.taobao.org
+ if [ "X${SKIP_SSL}" == "XYES" ];then
+ npm config set strict-ssl false
+ fi
+ npm cache clean -f
+ npm install
+
+ cd ${code_dir}
+ if [ -d "${code_dir}/prebuilts/build-tools/common/js-framework" ]; then
+ echo -e "\n"
+ echo "${code_dir}/prebuilts/build-tools/common/js-framework already exist, it will be replaced with node-${node_js_ver}"
+ /bin/rm -rf ${code_dir}/prebuilts/build-tools/common/js-framework
+ echo -e "\n"
+ fi
+
+ mkdir -p ${code_dir}/prebuilts/build-tools/common/js-framework
+ /bin/cp -rf ${code_dir}/third_party/jsframework/node_modules ${code_dir}/prebuilts/build-tools/common/js-framework/
+fi
+
+if [ ! -d "${code_dir}/developtools/ace-ets2bundle/compiler" ]; then
+ echo "${code_dir}/developtools/ace-ets2bundle/compiler not exist, it shouldn't happen, pls check..."
+else
+ cd ${code_dir}/developtools/ace-ets2bundle/compiler
+ export PATH=${code_dir}/prebuilts/build-tools/common/nodejs/${node_js_name}/bin:$PATH
+ npm config set registry http://registry.npm.taobao.org
+ if [ "X${SKIP_SSL}" == "XYES" ];then
+ npm config set strict-ssl false
+ fi
+ npm cache clean -f
+ npm install
+fi
+
+if [ ! -d "${code_dir}/developtools/ace-js2bundle/ace-loader" ]; then
+ echo "${code_dir}/developtools/ace-js2bundle/ace-loader not exist, it shouldn't happen, pls check..."
+else
+ cd ${code_dir}/developtools/ace-js2bundle/ace-loader
+ export PATH=${code_dir}/prebuilts/build-tools/common/nodejs/${node_js_name}/bin:$PATH
+ npm config set registry http://registry.npm.taobao.org
+ if [ "X${SKIP_SSL}" == "XYES" ];then
+ npm config set strict-ssl false
+ fi
+ npm cache clean -f
+ npm install
+fi
+
+if [ -d "${code_dir}/ark/ts2abc/ts2panda" ]; then
+ cd ${code_dir}/ark/ts2abc/ts2panda
+ export PATH=${code_dir}/prebuilts/build-tools/common/nodejs/${node_js_name}/bin:$PATH
+ npm config set registry http://registry.npm.taobao.org
+ if [ "X${SKIP_SSL}" == "XYES" ];then
+ npm config set strict-ssl false
+ fi
+ npm cache clean -f
+ npm install
+
+ cd ${code_dir}
+ if [ -d "${code_dir}/prebuilts/build-tools/common/ts2abc" ]; then
+ echo -e "\n"
+ echo "${code_dir}/prebuilts/build-tools/common/ts2abc already exist, it will be replaced with node-${node_js_ver}"
+ /bin/rm -rf ${code_dir}/prebuilts/build-tools/common/ts2abc
+ echo -e "\n"
+ fi
+
+ mkdir -p ${code_dir}/prebuilts/build-tools/common/ts2abc
+ /bin/cp -rf ${code_dir}/ark/ts2abc/ts2panda/node_modules ${code_dir}/prebuilts/build-tools/common/ts2abc/
+fi
+
+# Install the dependencies of the JS components in the HarmonyOS SDK
+if [ -d "${code_dir}/prebuilts/sdk/js-loader/build-tools/ace-loader" ]; then
+ cd ${code_dir}/prebuilts/sdk/js-loader/build-tools/ace-loader
+ export PATH=${code_dir}/prebuilts/build-tools/common/nodejs/${node_js_name}/bin:$PATH
+ npm config set registry http://registry.npm.taobao.org
+ npm config set @ohos:registry=https://repo.harmonyos.com/npm/
+ if [ "X${SKIP_SSL}" == "XYES" ];then
+ npm config set strict-ssl false
+ fi
+ npm cache clean -f
+ npm install
+fi
+
+cd ${code_dir}
+echo -e "\n"
diff --git a/dsoftbus/build/print_python_deps.py b/dsoftbus/build/print_python_deps.py
new file mode 100755
index 0000000000000000000000000000000000000000..304490c42f1b0674869bc022bd0d066025800b6f
--- /dev/null
+++ b/dsoftbus/build/print_python_deps.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Prints all non-system dependencies for the given module.
+
+The primary use-case for this script is to generate the list of python
+modules required for .isolate files.
+"""
+
+import argparse
+import imp
+import os
+import pipes
+import sys
+
+# Don't use any helper modules, or else they will end up in the results.
+
+_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
+
+
+def _compute_python_dependencies():
+ """Gets the paths of imported non-system python modules.
+
+ A path is assumed to be a "system" import if it is outside of chromium's
+ src/. The paths will be relative to the current directory.
+ """
+ module_paths = (m.__file__ for m in sys.modules.values()
+ if m and hasattr(m, '__file__'))
+
+ src_paths = set()
+ for path in module_paths:
+ if path == __file__:
+ continue
+ path = os.path.abspath(path)
+ if not path.startswith(_SRC_ROOT):
+ continue
+
+ if (path.endswith('.pyc')
+ or (path.endswith('c') and not os.path.splitext(path)[1])):
+ path = path[:-1]
+ src_paths.add(path)
+
+ return src_paths
+
+
+def _normalize_command_line(options):
+ """Returns a string that when run from SRC_ROOT replicates the command."""
+ args = ['build/print_python_deps.py']
+ root = os.path.relpath(options.root, _SRC_ROOT)
+ if root != '.':
+ args.extend(('--root', root))
+ if options.output:
+ args.extend(('--output', os.path.relpath(options.output, _SRC_ROOT)))
+ if options.gn_paths:
+ args.extend(('--gn-paths', ))
+ for allowlist in sorted(options.allowlists):
+ args.extend(('--allowlist', os.path.relpath(allowlist, _SRC_ROOT)))
+ args.append(os.path.relpath(options.module, _SRC_ROOT))
+ return ' '.join(pipes.quote(x) for x in args)
+
+
+def _find_python_in_directory(directory):
+ """Returns an iterable of all non-test python files in the given directory."""
+ for root, _dirnames, filenames in os.walk(directory):
+ for filename in filenames:
+ if filename.endswith('.py') and not filename.endswith('_test.py'):
+ yield os.path.join(root, filename)
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description='Prints all non-system dependencies for the given module.')
+ parser.add_argument('module', help='The python module to analyze.')
+ parser.add_argument('--root',
+ default='.',
+ help='Directory to make paths relative to.')
+ parser.add_argument('--output',
+ help='Write output to a file rather than stdout.')
+ parser.add_argument(
+ '--inplace',
+ action='store_true',
+ help='Write output to a file with the same path as the '
+ 'module, but with a .pydeps extension. Also sets the '
+ 'root to the module\'s directory.')
+ parser.add_argument('--no-header',
+ action='store_true',
+ help='Do not write the "# Generated by" header.')
+ parser.add_argument('--gn-paths',
+ action='store_true',
+ help='Write paths as //foo/bar/baz.py')
+ parser.add_argument('--allowlist',
+ default=[],
+ action='append',
+ dest='allowlists',
+ help='Recursively include all non-test python files '
+ 'within this directory. '
+ 'May be specified multiple times.')
+ options = parser.parse_args()
+ # Replace the path entry for print_python_deps.py with the one for the given
+ # module.
+ sys.path[0] = os.path.dirname(options.module)
+ imp.load_source('NAME', options.module)
+
+ if options.inplace:
+ if options.output:
+ parser.error('Cannot use --inplace and --output at the same time!')
+ if not options.module.endswith('.py'):
+ parser.error('Input module path should end with .py suffix!')
+ options.output = options.module + 'deps'
+ options.root = os.path.dirname(options.module)
+
+ paths_set = _compute_python_dependencies()
+ for path in options.allowlists:
+ paths_set.update(
+ os.path.abspath(p) for p in _find_python_in_directory(path))
+
+ paths = [os.path.relpath(p, options.root) for p in paths_set]
+
+ normalized_cmdline = _normalize_command_line(options)
+ out = open(options.output, 'w') if options.output else sys.stdout
+ try:
+ if not options.no_header:
+ out.write('# Generated by running:\n')
+ out.write('# %s\n' % normalized_cmdline)
+ prefix = '//' if options.gn_paths else ''
+ for path in sorted(paths):
+ out.write(prefix + path + '\n')
+ finally:
+ # Only close a file we opened ourselves; never close sys.stdout.
+ if options.output:
+ out.close()
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/scripts/__init__.py b/dsoftbus/build/scripts/__init__.py
new file mode 100755
index 0000000000000000000000000000000000000000..d7b3a083706fb60581a81c2a917e927139f61f7f
--- /dev/null
+++ b/dsoftbus/build/scripts/__init__.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/dsoftbus/build/scripts/asan_backup.py b/dsoftbus/build/scripts/asan_backup.py
new file mode 100755
index 0000000000000000000000000000000000000000..7bb1709161e5253f65fbf708b532be910805ab09
--- /dev/null
+++ b/dsoftbus/build/scripts/asan_backup.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+import shutil
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from scripts.util.file_utils import read_json_file
+
+
+def _get_toolchain_name(toolchain_label):
+ return toolchain_label.split(':')[1]
+
+
+def _remove_unstripped_dir(toolchain_info_file):
+ data = read_json_file(toolchain_info_file)
+ if data is None:
+ raise Exception("read file '{}' failed.".format(toolchain_info_file))
+ platform_toolchain = data.get('platform_toolchain')
+ base_dir_list = []
+ for key, val in platform_toolchain.items():
+ if key == 'phone':
+ base_dir_list.append('.')
+ else:
+ toolchain_name = _get_toolchain_name(val)
+ base_dir_list.append(toolchain_name)
+ dir_list = ['lib.unstripped', 'exe.unstripped']
+ for _base_dir in base_dir_list:
+ for _dir_name in dir_list:
+ _dir = os.path.join(_base_dir, _dir_name)
+ if os.path.exists(_dir):
+ shutil.rmtree(_dir)
+
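+# The toolchain info file is assumed to look roughly like (illustrative):
+#   {"platform_toolchain": {"phone": "//build/toolchain:name", ...}}
+# 'phone' maps to the default output directory '.'; other platforms map to
+# the directory named after their toolchain.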
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--backup-dir', required=True)
+ parser.add_argument('--backup-dest-dir', required=True)
+ parser.add_argument('--asan-clean', dest='asan_clean', action='store_true')
+ parser.set_defaults(asan_clean=False)
+ parser.add_argument('--removed-dir-list', nargs='*', default=[])
+ parser.add_argument('--platforms-toolchain-info-file')
+ args = parser.parse_args()
+
+ if not os.path.exists(args.backup_dir):
+ return 0
+
+ if os.path.exists(args.backup_dest_dir):
+ shutil.rmtree(args.backup_dest_dir)
+ os.makedirs(args.backup_dest_dir, exist_ok=True)
+
+ dir_name = os.path.basename(args.backup_dir)
+ _dest_dir = os.path.join(args.backup_dest_dir, dir_name)
+ shutil.copytree(args.backup_dir, _dest_dir)
+
+ if args.asan_clean:
+ if args.removed_dir_list:
+ for _dir in args.removed_dir_list:
+ if os.path.exists(_dir):
+ shutil.rmtree(_dir)
+ if args.platforms_toolchain_info_file:
+ _remove_unstripped_dir(args.platforms_toolchain_info_file)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/scripts/build_js_assets.py b/dsoftbus/build/scripts/build_js_assets.py
new file mode 100755
index 0000000000000000000000000000000000000000..624d5940cf28c7e849e44955fac90a3cd4810076
--- /dev/null
+++ b/dsoftbus/build/scripts/build_js_assets.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import optparse
+import os
+import sys
+import tempfile
+import json
+import shutil
+
+from util import build_utils # noqa: E402
+
+
+def parse_args(args):
+ args = build_utils.expand_file_args(args)
+
+ parser = optparse.OptionParser()
+ build_utils.add_depfile_option(parser)
+ parser.add_option('--output', help='stamp file')
+ parser.add_option('--js-assets-dir', help='js assets directory')
+ parser.add_option('--nodejs-path', help='path to nodejs app')
+ parser.add_option('--webpack-js', help='path to webpack.js')
+ parser.add_option('--webpack-config-js', help='path to webpack.config.js')
+ parser.add_option('--hap-profile', help='path to hap profile')
+ parser.add_option('--build-mode', help='debug mode or release mode')
+ parser.add_option('--js2abc',
+ action='store_true',
+ default=False,
+ help='whether to transform js to ark bytecode')
+
+ options, _ = parser.parse_args(args)
+ options.js_assets_dir = build_utils.parse_gn_list(options.js_assets_dir)
+ return options
+
+
+def build_ace(cmd, options):
+ with build_utils.temp_dir() as build_dir:
+ gen_dir = os.path.join(build_dir, 'gen')
+ manifest = os.path.join(build_dir, 'manifest.json')
+ my_env = {
+ "aceModuleRoot": options.js_assets_dir[0],
+ "aceModuleBuild": gen_dir,
+ "aceManifestPath": manifest,
+ "buildMode": options.build_mode,
+ "PATH": os.environ.get('PATH'),
+ }
+ if not os.path.exists(manifest) and options.hap_profile:
+ with open(options.hap_profile) as profile:
+ config = json.load(profile)
+ data = dict()
+ data['appID'] = config['app']['bundleName']
+ data['appName'] = config['module']['abilities'][0]['label']
+ data['versionName'] = config['app']['version']['name']
+ data['versionCode'] = config['app']['version']['code']
+ data['pages'] = config['module']['js'][0]['pages']
+ data['deviceType'] = config['module']['deviceType']
+ data['window'] = config['module']['js'][0]['window']
+ build_utils.write_json(data, manifest)
+ build_utils.check_output(cmd, env=my_env)
+ for root, _, files in os.walk(gen_dir):
+ for file in files:
+ filename = os.path.join(root, file)
+ if filename.endswith('.js.map'):
+ os.unlink(filename)
+ build_utils.zip_dir(options.output,
+ gen_dir,
+ zip_prefix_path='assets/js/default/')
+
+
+def main(args):
+ options = parse_args(args)
+
+ inputs = ([
+ options.nodejs_path, options.webpack_js, options.webpack_config_js
+ ])
+ depfiles = (build_utils.get_all_files(options.js_assets_dir[0]))
+
+ cmd = [
+ options.nodejs_path,
+ options.webpack_js,
+ '--config',
+ options.webpack_config_js,
+ ]
+ if options.js2abc:
+ cmd.extend(['--compilerType', 'ark'])
+
+ build_utils.call_and_write_depfile_if_stale(
+ lambda: build_ace(cmd, options),
+ options,
+ depfile_deps=depfiles,
+ input_paths=depfiles + inputs,
+ input_strings=cmd + [options.build_mode],
+ output_paths=([options.output]),
+ force=False,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/scripts/build_js_assets.pydeps b/dsoftbus/build/scripts/build_js_assets.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..40a76bea708b81efd4136ecef20eeebd745d6701
--- /dev/null
+++ b/dsoftbus/build/scripts/build_js_assets.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/scripts --output build/scripts/build_js_assets.pydeps build/scripts/build_js_assets.py
+../gn_helpers.py
+build_js_assets.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/pycache.py
diff --git a/dsoftbus/build/scripts/build_target_handler.py b/dsoftbus/build/scripts/build_target_handler.py
new file mode 100755
index 0000000000000000000000000000000000000000..f6df81df7b6d0b58a8a415896a6576c6ee31501a
--- /dev/null
+++ b/dsoftbus/build/scripts/build_target_handler.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+import ninja_rules_parser
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from scripts.util.file_utils import read_json_file, read_file
+
+
+def _read_parts_variants(build_configs_dir):
+ parts_variants_file = os.path.join(build_configs_dir, 'parts_info',
+ 'parts_variants.json')
+ if not os.path.exists(parts_variants_file):
+ raise Exception("file '{}' doesn't exist.".format(parts_variants_file))
+ parts_variants = read_json_file(parts_variants_file)
+ if parts_variants is None:
+ raise Exception("read file '{}' failed.".format(parts_variants_file))
+ return parts_variants
+
+
+def get_build_target(build_dir, build_targets, target_platform):
+ if not target_platform or target_platform == 'all':
+ target_platform = 'phone'
+ real_targets = []
+ build_configs_dir = os.path.join(build_dir, 'build_configs')
+ parts_variants = _read_parts_variants(build_configs_dir)
+ for _target_name in build_targets:
+ if _target_name in parts_variants:
+ if target_platform == 'phone':
+ real_targets.append(_target_name)
+ else:
+ variants = parts_variants.get(_target_name)
+ if target_platform in variants:
+ real_targets.append('{0}/{1}_{0}'.format(
+ target_platform, _target_name))
+ else:
+ real_targets.append(_target_name)
+ else:
+ if target_platform == 'phone':
+ real_targets.append(_target_name)
+ else:
+ real_targets.append('{0}/{1}'.format(target_platform,
+ _target_name))
+ return real_targets
+
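+# Example (placeholder names): on target_platform 'tv', a part 'foo' that has
+# a 'tv' variant becomes 'tv/foo_tv'; on the default 'phone' platform the
+# target name 'foo' is used unchanged.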
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--source-root-dir', required=True)
+ parser.add_argument('--root-build-dir', required=True)
+ parser.add_argument('--build-target-name', nargs='*')
+ parser.add_argument('--target-platform', required=False)
+ args = parser.parse_args()
+
+ source_root_dir = args.source_root_dir
+ if not os.path.exists(os.path.join(source_root_dir, '.gn')):
+ print('source root dir incorrect.')
+ return 1
+
+ build_dir = os.path.join(source_root_dir, args.root_build_dir)
+ # rewrite build.log
+ ninja_rules_parser.update(
+ build_dir, 'build_configs/platforms_info/toolchain_to_variant.json')
+
+ if args.target_platform is None or args.target_platform == 'all':
+ target_platform = None
+ else:
+ target_platform = args.target_platform
+ # process build targets
+ build_targets = get_build_target(build_dir, args.build_target_name,
+ target_platform)
+ # output build_targets string, be used on the ninja command line
+ print(' '.join(build_targets))
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/scripts/code_release.py b/dsoftbus/build/scripts/code_release.py
new file mode 100755
index 0000000000000000000000000000000000000000..6ec727dfffcd254a265b6e608b12bdbf67d3ce45
--- /dev/null
+++ b/dsoftbus/build/scripts/code_release.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import shutil
+import tarfile
+sys.path.append(os.path.abspath(os.path.dirname(
+ os.path.abspath(os.path.dirname(__file__)))))
+from scripts.util.file_utils import read_json_file # noqa: E402
+
+RELEASE_FILENAME = 'README.OpenSource'
+scan_dir_list = ['third_party']
+
+
+def get_source_top_dir():
+ top_dir = os.path.abspath(os.path.dirname(
+ os.path.abspath(os.path.dirname(
+ os.path.abspath(os.path.dirname(__file__))))))
+ return top_dir
+
+
+def get_package_dir():
+ top_dir = get_source_top_dir()
+ package_dir = os.path.join(top_dir, 'out', 'Code_Opensource')
+ return package_dir
+
+
+def copy_opensource_file(opensource_config_file):
+ if not os.path.exists(opensource_config_file):
+ print("Warning, the opensource config file is not exists.")
+ return False
+
+ top_dir = get_source_top_dir()
+ package_dir = get_package_dir()
+ src_dir = os.path.dirname(opensource_config_file)
+ dst_dir = os.path.join(package_dir, os.path.relpath(src_dir, top_dir))
+
+ # copy opensource folder to out dir
+ if os.path.exists(dst_dir):
+ shutil.rmtree(dst_dir)
+ shutil.copytree(src_dir, dst_dir, symlinks=True,
+ ignore=shutil.ignore_patterns('*.pyc', 'tmp*', '.git*'))
+
+ # delete the README.OpenSource file
+ release_file = os.path.join(dst_dir, RELEASE_FILENAME)
+ os.remove(release_file)
+ return True
+
+
+def parse_opensource_file(opensource_config_file):
+ if not os.path.exists(opensource_config_file):
+ print("Warning, the opensource config file is not exists.")
+ return False
+
+ opensource_config = read_json_file(opensource_config_file)
+ if opensource_config is None:
+ raise Exception("read opensource config file [{}] failed.".format(
+ opensource_config_file))
+
+ result = False
+ for info in opensource_config:
+ license_name = info.get('License', '')
+ if 'GPL' in license_name or 'LGPL' in license_name:
+ result = copy_opensource_file(opensource_config_file)
+
+ return result
+
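+# A README.OpenSource file is assumed to contain entries roughly like
+#   [{"Name": "...", "License": "GPL-2.0", ...}]  (illustrative);
+# only components whose license mentions GPL or LGPL are copied out.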
+
+def scan_and_package_code_release(scan_dir):
+ file_dir_names = os.listdir(scan_dir)
+ for file_dir_name in file_dir_names:
+ file_dir_path = os.path.join(scan_dir, file_dir_name)
+ if os.path.isdir(file_dir_path):
+ scan_and_package_code_release(file_dir_path)
+ elif file_dir_path == os.path.join(scan_dir, RELEASE_FILENAME):
+ parse_opensource_file(file_dir_path)
+
+
+def scan_opensource_dir_list(scan_list):
+ for scan_dir in scan_list:
+ scan_and_package_code_release(scan_dir)
+
+
+def tar_opensource_package_file():
+ package_dir = get_package_dir()
+ top_dir = get_source_top_dir()
+ result = -1
+ if os.path.exists(package_dir):
+ package_filename = os.path.join(
+ top_dir, 'out', 'Code_Opensource.tar.gz')
+ try:
+ with tarfile.open(package_filename, "w:gz") as tar:
+ tar.add(package_dir, arcname=os.path.basename(package_dir))
+ result = 0
+ except IOError as err:
+ raise err
+ return result
+
+
+def main():
+ # get the source top directory to be scan
+ top_dir = get_source_top_dir()
+
+ # generate base_dir/out/Code_Opensource dir
+ package_dir = get_package_dir()
+ if os.path.exists(package_dir):
+ shutil.rmtree(package_dir)
+ os.makedirs(package_dir)
+
+ # scan the target dir and copy release code to out/opensource dir
+ dir_list = [os.path.join(top_dir, dir_name) for dir_name in scan_dir_list]
+ print(dir_list)
+ scan_opensource_dir_list(dir_list)
+
+ # package the opensource to Code_Opensource.tar.gz
+ if tar_opensource_package_file() == 0:
+ print('Generate the opensource package successfully.')
+ else:
+ print('Generate the opensource package failed.')
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/scripts/compile_resources.py b/dsoftbus/build/scripts/compile_resources.py
new file mode 100755
index 0000000000000000000000000000000000000000..3658004e6b279e665cb38deb8b0fe4deacf63f77
--- /dev/null
+++ b/dsoftbus/build/scripts/compile_resources.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import optparse
+import os
+import sys
+import shutil
+import tempfile
+import json
+
+from util import build_utils # noqa: E402
+
+
+def parse_args(args):
+ args = build_utils.expand_file_args(args)
+
+ parser = optparse.OptionParser()
+ build_utils.add_depfile_option(parser)
+ parser.add_option('--output-resources-zipfile',
+ help='path to packaged resources')
+ parser.add_option('--output-header-file',
+ help='path to generated ResourceTable.h')
+ parser.add_option('--resources-dir', help='resources directory')
+ parser.add_option('--restool-path', help='path to restool')
+ parser.add_option('--hap-profile', help='path to hap profile')
+ parser.add_option('--package-name', help='package name of resource file')
+
+ options, _ = parser.parse_args(args)
+ options.resources_dir = build_utils.parse_gn_list(options.resources_dir)
+ return options
+
+
+def get_package_name_from_profile(profile):
+ with open(profile) as fp:
+ return json.load(fp)['module']['package']
+
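+# The hap profile is assumed to contain, e.g. (illustrative):
+#   {"module": {"package": "com.example.app", ...}}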
+
+def compile_resources(options):
+ with build_utils.temp_dir() as build:
+ res_dir = os.path.join(build, 'resources')
+ gen_dir = os.path.join(build, 'gen')
+ header_dir = os.path.join(build, 'header')
+ os.makedirs(res_dir)
+ os.makedirs(gen_dir)
+ os.makedirs(header_dir)
+
+ for directory in options.resources_dir:
+ shutil.copytree(directory,
+ os.path.join(res_dir, os.path.basename(directory)))
+ cmd = [options.restool_path, '-i', res_dir]
+ shutil.copy(options.hap_profile, os.path.join(res_dir, 'config.json'))
+ if options.package_name != "" and options.package_name is not None:
+ package_name = options.package_name
+ else:
+ package_name = get_package_name_from_profile(options.hap_profile)
+ generated_header_file = os.path.join(
+ header_dir, os.path.basename(options.output_header_file))
+ cmd.extend(
+ ['-p', package_name, '-o', gen_dir, '-r', generated_header_file])
+ build_utils.check_output(cmd)
+ R_txt_path = os.path.join(gen_dir, 'R.txt')
+ if os.path.exists(R_txt_path):
+ os.unlink(R_txt_path)
+ if options.output_resources_zipfile:
+ build_utils.zip_dir(options.output_resources_zipfile, gen_dir)
+ if options.output_header_file:
+ shutil.copy(generated_header_file, options.output_header_file)
+
+
+def main(args):
+ options = parse_args(args)
+
+ inputs = ([options.restool_path, options.hap_profile])
+ depfiles = []
+ for directory in options.resources_dir:
+ depfiles += (build_utils.get_all_files(directory))
+
+ input_strings = [options.package_name] if options.package_name else []
+ outputs = []
+ if options.output_resources_zipfile:
+ outputs.append(options.output_resources_zipfile)
+ if options.output_header_file:
+ outputs.append(options.output_header_file)
+ build_utils.call_and_write_depfile_if_stale(
+ lambda: compile_resources(options),
+ options,
+ depfile_deps=depfiles,
+ input_paths=inputs + depfiles,
+ input_strings=input_strings,
+ output_paths=(outputs),
+ force=False,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/scripts/compile_resources.pydeps b/dsoftbus/build/scripts/compile_resources.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..6acff54c2a2d373d29a3ef6a43f8c8c47a0593ed
--- /dev/null
+++ b/dsoftbus/build/scripts/compile_resources.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/scripts --output build/scripts/compile_resources.pydeps build/scripts/compile_resources.py
+../gn_helpers.py
+compile_resources.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/pycache.py
diff --git a/dsoftbus/build/scripts/copy_ex.py b/dsoftbus/build/scripts/copy_ex.py
new file mode 100755
index 0000000000000000000000000000000000000000..387eec703653cdba89545e88a61e483a556fe2a5
--- /dev/null
+++ b/dsoftbus/build/scripts/copy_ex.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Copies files to a directory."""
+
+import filecmp
+import itertools
+import optparse
+import os
+import shutil
+import sys
+
+from util import build_utils
+
+
+def copy_tree(src,
+ dest,
+ follow_all_symlinks=False,
+ follow_outside_symlinks=False):
+ """copy src/* to dest/
+ I. If follow_outside_symlinks is true,
+ 1. If src item is a symlink, and points to some item inside src, then copy
+ the symlink to dest.
+ 2. If src item points to items outside src, then follow links to copy the
+ original file to dest.
+ 3. Else copy src item to dest.
+ II. If follow_all_symlinks is true,
+ 1. If src item is a symlink, then follow links to copy the original file to
+ dest.
+ 2. Else copy src item to dest.
+ follow_outside_symlinks is true when follow_all_symlinks is true.
+ """
+ with os.scandir(src) as itr:
+ items = list(itr)
+ return _do_copy_tree(
+ items,
+ src,
+ dest,
+ follow_all_symlinks=follow_all_symlinks,
+ follow_outside_symlinks=follow_outside_symlinks)
+
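+# Usage sketch (hypothetical layout): given src/link -> ../outside.txt,
+# copy_tree('src', 'dest') recreates the symlink by default, while
+# follow_outside_symlinks=True copies the pointed-to file into dest instead.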
+
+def _do_copy_tree(items,
+ src,
+ dest,
+ follow_all_symlinks=False,
+ follow_outside_symlinks=False):
+ errors = []
+ os.makedirs(dest, exist_ok=True)
+ for item in items:
+ srcname = os.path.join(src, item.name)
+ destname = os.path.join(dest, item.name)
+ is_symlink = item.is_symlink()
+ if is_symlink:
+ org_linkto = os.readlink(srcname)
+ linkto = org_linkto
+ if not os.path.isabs(org_linkto):
+ linkto = os.path.join(os.path.dirname(item), org_linkto)
+
+ if not os.path.exists(linkto):
+ os.symlink(org_linkto, destname)
+ shutil.copymode(srcname, destname, follow_symlinks=False)
+ continue
+
+ if follow_all_symlinks:
+ if item.is_dir():
+ copy_tree(
+ item,
+ destname,
+ follow_all_symlinks=follow_all_symlinks,
+ follow_outside_symlinks=follow_outside_symlinks)
+ else:
+ shutil.copy(item, destname)
+ shutil.copymode(item, destname)
+
+ elif follow_outside_symlinks:
+ if os.path.abspath(src) in os.path.abspath(
+ linkto) and not os.path.isabs(org_linkto):
+ os.symlink(org_linkto, destname)
+ shutil.copymode(srcname, destname, follow_symlinks=False)
+ else:
+ if item.is_dir():
+ copy_tree(
+ item,
+ destname,
+ follow_all_symlinks=follow_all_symlinks,
+ follow_outside_symlinks=follow_outside_symlinks)
+ else:
+ shutil.copy(item, destname)
+ shutil.copymode(item, destname)
+ else:
+ os.symlink(org_linkto, destname)
+ shutil.copymode(srcname, destname, follow_symlinks=False)
+ elif item.is_dir():
+ copy_tree(
+ item,
+ destname,
+ follow_all_symlinks=follow_all_symlinks,
+ follow_outside_symlinks=follow_outside_symlinks)
+ else:
+ shutil.copy(item, destname)
+ shutil.copymode(item, destname)
+ shutil.copystat(src, dest)
+ return dest
+
+
+def CopyFile(f,
+ dest,
+ deps,
+ follow_all_symlinks=False,
+ follow_outside_symlinks=False):
+ """Copy file or directory and update deps."""
+ if os.path.isdir(f):
+ copy_tree(
+ f,
+ os.path.join(dest, os.path.basename(f)),
+ follow_all_symlinks=follow_all_symlinks,
+ follow_outside_symlinks=follow_outside_symlinks)
+ deps.extend(build_utils.get_all_files(f))
+ else:
+ if os.path.isfile(os.path.join(dest, os.path.basename(f))):
+ dest = os.path.join(dest, os.path.basename(f))
+
+ deps.append(f)
+
+ if os.path.isfile(dest):
+ if filecmp.cmp(dest, f, shallow=False):
+ return
+ # The shutil.copy() below would fail if the file does not have write
+ # permissions. Deleting the file has similar costs to modifying the
+ # permissions.
+ os.unlink(dest)
+
+ shutil.copy(f, dest)
+
+
+def DoCopy(options, deps):
+ """Copy files or directories given in options.files and update deps."""
+ files = list(
+ itertools.chain.from_iterable(
+ build_utils.parse_gn_list(f) for f in options.files))
+
+ for f in files:
+ if not options.ignore_stale:
+ if os.path.isdir(f) and not options.clear:
+ print('To avoid stale files you must use --clear when copying '
+ 'directories')
+ sys.exit(-1)
+ CopyFile(
+ f,
+ options.dest,
+ deps,
+ follow_all_symlinks=options.follow_all_symlinks,
+ follow_outside_symlinks=options.follow_outside_symlinks)
+
+
+def DoRenaming(options, deps):
+ """Copy and rename files given in options.renaming_sources and update deps."""
+ src_files = list(
+ itertools.chain.from_iterable(
+ build_utils.parse_gn_list(f) for f in options.renaming_sources))
+
+ dest_files = list(
+ itertools.chain.from_iterable(
+ build_utils.parse_gn_list(f) for f in options.renaming_destinations))
+
+ if len(src_files) != len(dest_files):
+ print('The number of renaming source and destination files does not match.')
+ sys.exit(-1)
+
+ for src, dest in zip(src_files, dest_files):
+ if os.path.isdir(src):
+ print('Renaming directories is not supported.')
+ sys.exit(-1)
+ else:
+ CopyFile(src, os.path.join(options.dest, dest), deps)
+
+
+def main(args):
+ args = build_utils.expand_file_args(args)
+
+ parser = optparse.OptionParser()
+ build_utils.add_depfile_option(parser)
+
+ parser.add_option('--dest', help='Directory to copy files to.')
+ parser.add_option('--files', action='append', help='List of files to copy.')
+ parser.add_option(
+ '--clear',
+ action='store_true',
+ help='If set, the destination directory will be deleted '
+ 'before copying files to it. This is highly recommended to '
+ 'ensure that no stale files are left in the directory.')
+ parser.add_option(
+ '--ignore-stale',
+ action='store_true',
+ help='Copy even if there may be stale files. '
+ 'If set, overrides --clear.')
+ parser.add_option('--stamp', help='Path to touch on success.')
+ parser.add_option(
+ '--follow-all-symlinks',
+ action='store_true',
+ help='Whether to follow all symlinks.')
+ parser.add_option(
+ '--follow-outside-symlinks',
+ action='store_true',
+ help='Whether to follow symlinks that point to targets outside '
+ 'the source directory.')
+ parser.add_option(
+ '--renaming-sources',
+ action='append',
+ help='List of files to be renamed while being '
+ 'copied to the dest directory.')
+ parser.add_option(
+ '--renaming-destinations',
+ action='append',
+ help='List of destination file names without paths; the '
+ 'number of elements must match --renaming-sources.')
+
+ options, _ = parser.parse_args(args)
+ if options.follow_all_symlinks:
+ options.follow_outside_symlinks = True
+
+ if options.clear and not options.ignore_stale:
+ build_utils.delete_directory(options.dest)
+ build_utils.make_directory(options.dest)
+
+ deps = []
+
+ if options.files:
+ DoCopy(options, deps)
+
+ if options.renaming_sources:
+ DoRenaming(options, deps)
+
+ if options.depfile:
+ build_utils.write_depfile(
+ options.depfile, options.stamp, deps, add_pydeps=False)
+
+ if options.stamp:
+ build_utils.touch(options.stamp)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/scripts/copy_ex.pydeps b/dsoftbus/build/scripts/copy_ex.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..bd081243d3305dc223629705eb489a06ebbb5a23
--- /dev/null
+++ b/dsoftbus/build/scripts/copy_ex.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/scripts --output build/scripts/copy_ex.pydeps build/scripts/copy_ex.py
+../gn_helpers.py
+copy_ex.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/pycache.py
diff --git a/dsoftbus/build/scripts/dir_exists.py b/dsoftbus/build/scripts/dir_exists.py
new file mode 100755
index 0000000000000000000000000000000000000000..5fa150d9a0efd0f92c58124689335443d95ef49f
--- /dev/null
+++ b/dsoftbus/build/scripts/dir_exists.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes 'True' or 'False' depending on whether the argument is a directory."""
+
+import os.path
+import sys
+
+
+def main():
+ sys.stdout.write(_is_dir(sys.argv[1]))
+ return 0
+
+
+def _is_dir(dir_name):
+ return str(os.path.isdir(dir_name))
+
+
+def DoMain(args):
+ """Hook to be called from gyp without starting a separate python
+ interpreter."""
+ return _is_dir(args[0])
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/scripts/find.py b/dsoftbus/build/scripts/find.py
new file mode 100755
index 0000000000000000000000000000000000000000..0704b723be10b8b7d9cc79cb24eae607bed4a336
--- /dev/null
+++ b/dsoftbus/build/scripts/find.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Finds files in directories.
+"""
+
+import fnmatch
+import optparse
+import os
+import sys
+from util import build_utils
+
+
+def main(argv):
+ parser = optparse.OptionParser()
+ parser.add_option('--pattern', default='*', help='File pattern to match.')
+ parser.add_option('--base-dir', help='base directory')
+ parser.add_option(
+ '--return-relpath',
+ action='store_true',
+ help='Return paths relative to the base directory.')
+ parser.add_option(
+ '--follow-symlinks', action='store_true', help='whether to follow links')
+ options, directories = parser.parse_args(argv)
+
+ for d in directories:
+ if not os.path.exists(d):
+ print('%s does not exist' % d)
+ return 1
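+ # a single file argument: match the path directly against the pattern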
+ if os.path.isfile(d):
+ if options.return_relpath:
+ if options.base_dir is not None:
+ if fnmatch.fnmatch(d, options.pattern):
+ print(os.path.relpath(d, options.base_dir))
+ else:
+ print("Please specify the relative base directory")
+ return 1
+ else:
+ if fnmatch.fnmatch(d, options.pattern):
+ print(d)
+ return 0
+ elif not os.path.isdir(d):
+ # if input path is not a directory nor a normal file, return error.
+ print('%s is not a directory or a file' % d)
+ return 1
+ for root, _, files in os.walk(d, followlinks=options.follow_symlinks):
+ for f in fnmatch.filter(files, options.pattern):
+ if options.return_relpath:
+ if options.base_dir is not None:
+ print(os.path.relpath(os.path.join(root, f), options.base_dir))
+ else:
+ print("Please specify the relative base directory")
+ return 1
+ else:
+ print(os.path.join(root, f))
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/scripts/gen_sdk_build_file.py b/dsoftbus/build/scripts/gen_sdk_build_file.py
new file mode 100755
index 0000000000000000000000000000000000000000..df456c929266ab6f14ecb0c515e71c1410a6c05c
--- /dev/null
+++ b/dsoftbus/build/scripts/gen_sdk_build_file.py
@@ -0,0 +1,244 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+import shutil
+import json
+from interface_mgr import InterfaceMgr
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from scripts.util.file_utils import read_json_file, write_file, write_json_file
+from scripts.util import build_utils
+
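+# BUILD.gn templates; the '%s' placeholders are filled in per sdk module below.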
+SHARED_LIBRARY_BUILD_TEMPLATE = '''
+config("%s_config") {
+ include_dirs = [ "%s/include", ]
+}
+ohos_prebuilt_shared_library("%s") {
+ source = "%s"
+ public_configs = [ ":%s_config" ]
+ subsystem_name = "%s"
+ part_name = "%s"
+ install_enable = false
+}
+
+'''
+JAR_BUILD_TEMPLATE = '''
+java_prebuilt("%s") {
+ jar_path = "%s"
+'''
+MAPLE_BUILD_TEMPLATE = '''
+ohos_maple_java_prebuilt("%s") {
+ mplt = %s
+ jar_path = "%s"
+'''
+
+
+# check sdk header file signatures;
+# skip the check if the check file directory does not exist
+def check_header_files(checkfile_dir, subsystem_sdk_dir, subsystem_name,
+ module_name, interface_mgr):
+ if checkfile_dir is None or not os.path.exists(checkfile_dir):
+ return
+ interface_mgr.check(checkfile_dir, subsystem_sdk_dir, subsystem_name,
+ module_name)
+
+
+def get_build_config_from_label(label, current_toolchain_dir):
+ build_config = label.replace('//', 'gen/').replace(':', '/')
+ bc_file = '{}/{}.build_config'.format(current_toolchain_dir, build_config)
+ return bc_file
+
+
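+# Append external_deps (if any) and close the prebuilt template opened by
+# JAR_BUILD_TEMPLATE or MAPLE_BUILD_TEMPLATE.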
+def add_dynamic_deps(output, build_config):
+ external_deps = build_utils.expand_file_args(
+ ['@FileArg({}:deps_info:external_deps)'.format(build_config)])
+ if external_deps[0] != '[ ]':
+ output += "external_deps = " + external_deps[0]
+ output += "}"
+ return output
+
+
+# Copy the SDK module to the sdk output directory,
+# and generate the prebuilt template to BUILD.gn file.
+def gen_build_file(input_file, sdk_out_dir, generate_sig,
+ signature_file_check_dir):
+ data = read_json_file(input_file)
+ if data is None:
+ raise Exception(
+ "sdk interface description info error, file [{}] does not exist.".
+ format(input_file))
+
+ build_file_content = 'import("//build/ohos.gni")\n'
+ build_file_content += 'import("//build/config/ohos/rules.gni")\n'
+ sdk_info = {}
+ # interface check
+ interface_mgr = InterfaceMgr()
+ for module_desc in data:
+ subsystem_name = module_desc.get('subsystem_name')
+ part_name = module_desc.get('part_name')
+ origin_name = module_desc.get('origin_name')
+ module_type = module_desc.get('type')
+ module_name = module_desc.get('name')
+ module_info_file = module_desc.get('source_info')
+ _current_toolchain_dir = module_desc.get('current_toolchain_dir')
+
+ # read module_info.json file, get module source path
+ source_data = read_json_file(module_info_file)
+ if not source_data:
+ print("read module [{}] info file failed, filename [{}].".format(
+ module_name, module_info_file))
+ continue
+
+ # copy lib file
+ source = source_data.get('source')
+ if module_type == 'jar':
+ source = source_data.get('alternative_source')
+
+ # check sdk type consistency
+ suffix = module_type
+ if module_type == "none":
+ continue
+ # Don't check suffix for maple sdk.
+ if module_type == "maple":
+ pass
+ elif not source.endswith(suffix):
+ raise Exception(
+ "sdk module [{}] type configuration is inconsistent.".format(
+ module_name))
+
+ module_sdk_out_dir = os.path.join(sdk_out_dir, module_name)
+ if not os.path.exists(module_sdk_out_dir):
+ os.makedirs(module_sdk_out_dir)
+ if module_type == "maple":
+ pass
+ else:
+ shutil.copy(source, module_sdk_out_dir)
+
+ # copy headers file
+ header_base = module_desc.get('header_base')
+ if not header_base:
+ raise Exception("{}: header_base config error.".format(
+ module_desc.get('label')))
+
+ header_files = module_desc.get('header_files')
+ # copy to sdk/{subsystem_name}/{module_name}/include/
+ for header_file in header_files:
+ rel_h_file = os.path.join(header_base, header_file)
+
+ header_file_dest = os.path.join(module_sdk_out_dir, 'include',
+ header_file)
+ header_file_dest_dir = os.path.dirname(header_file_dest)
+ if not os.path.exists(header_file_dest_dir):
+ os.makedirs(header_file_dest_dir)
+
+ shutil.copy(rel_h_file, header_file_dest)
+
+ # generate or check header files
+ if module_type == 'so' and generate_sig is False:
+ check_header_files(signature_file_check_dir, sdk_out_dir,
+ origin_name, module_name, interface_mgr)
+
+ # gen build file
+ lib_file = os.path.join(module_name, os.path.basename(source))
+ if module_type == 'so':
+ build_file_content += SHARED_LIBRARY_BUILD_TEMPLATE % (
+ module_name, module_name, module_name, lib_file, module_name,
+ subsystem_name, part_name)
+ elif module_type == 'jar':
+ bc = get_build_config_from_label(source_data.get("label"),
+ _current_toolchain_dir)
+ build_file_content += JAR_BUILD_TEMPLATE % (module_name, lib_file)
+ build_file_content = add_dynamic_deps(build_file_content, bc)
+ elif module_type == 'maple':
+ bc = get_build_config_from_label(source_data.get("label"),
+ _current_toolchain_dir)
+ bc_mplts = build_utils.expand_file_args(
+ ['@FileArg({}:outputs:output_mplt)'.format(bc)])
+ bc_mplts = build_utils.parse_and_flatten_gn_lists(bc_mplts)
+ sdk_mplts = []
+ base_dir = os.path.basename(module_sdk_out_dir)
+ for mplt in bc_mplts:
+ shutil.copy(mplt, module_sdk_out_dir)
+ sdk_mplts.append(os.path.join(base_dir,
+ os.path.basename(mplt)))
+
+ bc_jar_path = build_utils.expand_file_args(
+ ["@FileArg({}:deps_info:unprocessed_jar_path)".format(bc)])
+ bc_jar_path = build_utils.parse_and_flatten_gn_lists(bc_jar_path)
+ sdk_jar_path = []
+ for jar in bc_jar_path:
+ shutil.copy(jar, module_sdk_out_dir)
+ sdk_jar_path.append(
+ os.path.join(base_dir, os.path.basename(jar)))
+ build_file_content += MAPLE_BUILD_TEMPLATE % (
+ module_name, json.dumps(sdk_mplts, sort_keys=True,
+ indent=2), sdk_jar_path[0])
+ build_file_content = add_dynamic_deps(build_file_content, bc)
+ else:
+ raise Exception(
+ 'The type of sdk module configuration is not supported.')
+
+ module_info = {
+ 'source': lib_file,
+ 'label': module_desc.get('label'),
+ 'type': module_type
+ }
+ sdk_info[module_name] = module_info
+ return build_file_content, sdk_info
+
+
+def generate_sdk(input_file, sdk_out_dir, output_build_file, sdk_info_file,
+ generate_sig, signature_file_check_dir,
+ signature_file_gen_dir):
+ build_file_content, sdk_info = gen_build_file(input_file, sdk_out_dir,
+ generate_sig,
+ signature_file_check_dir)
+ # write file, build gn file
+ write_file(output_build_file, build_file_content)
+ # sdk info return
+ write_json_file(sdk_info_file, sdk_info)
+
+ # gen signature file
+ if generate_sig is True:
+ interface_mgr = InterfaceMgr()
+ interface_mgr.gen_sig_file_by_subsystem(sdk_out_dir,
+ signature_file_gen_dir)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--input-file', help='', required=True)
+ parser.add_argument('--sdk-out-dir', help='', required=True)
+ parser.add_argument('--output-build-file', help='', required=True)
+ parser.add_argument('--sdk-info-file', help='', required=True)
+ parser.add_argument('--generate-sig',
+ dest='generate_sig',
+ action='store_true')
+ parser.set_defaults(generate_sig=False)
+ parser.add_argument('--signature-file-check-dir', help='', required=False)
+ parser.add_argument('--signature-file-gen-dir', help='', required=False)
+ args = parser.parse_args()
+
+ generate_sdk(args.input_file, args.sdk_out_dir, args.output_build_file,
+ args.sdk_info_file, args.generate_sig,
+ args.signature_file_check_dir, args.signature_file_gen_dir)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/scripts/gen_sdk_build_file.pydeps b/dsoftbus/build/scripts/gen_sdk_build_file.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..f2a99432cb45b1f4035e1278f84ba7a8c5c8a33a
--- /dev/null
+++ b/dsoftbus/build/scripts/gen_sdk_build_file.pydeps
@@ -0,0 +1,11 @@
+# Generated by running:
+# build/print_python_deps.py --root build/scripts --output build/scripts/gen_sdk_build_file.pydeps build/scripts/gen_sdk_build_file.py
+../gn_helpers.py
+__init__.py
+gen_sdk_build_file.py
+interface_mgr.py
+util/__init__.py
+util/build_utils.py
+util/file_utils.py
+util/md5_check.py
+util/pycache.py
diff --git a/dsoftbus/build/scripts/gen_subsystem_ebpf_testcase_config.py b/dsoftbus/build/scripts/gen_subsystem_ebpf_testcase_config.py
new file mode 100755
index 0000000000000000000000000000000000000000..a9a247c451a24f899ad3a59cfba1a0451268a8c1
--- /dev/null
+++ b/dsoftbus/build/scripts/gen_subsystem_ebpf_testcase_config.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+
+Usage: gen_ebpf_testcase_config.py --subsystem-name common \
+ --subsystem-testcase-config-file xx/xxx.json \
+ --subsystem-testcase-list [xx/xx.py yy/yy.py ... ] \
+ ----subsystem-testcase-collect-path [xxx]
+
+Generate the ebpf testcase config files.
+
+"""
+import argparse
+import os
+import os.path
+import sys
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from scripts.util.file_utils import write_json_file # noqa: E402, E501
+
+
+def get_testcase_dest_list(src_testcase_list, testcase_collect_path):
+ dest_list = []
+ for testcase in src_testcase_list:
+ file_name = os.path.basename(testcase)
+ dest_list.append(os.path.join(testcase_collect_path, file_name))
+ return dest_list
+
+
+def write_testcase_config_file(subsystem_name,
+ config_file,
+ testcase_list,
+ testcase_collect_path):
+ dest_list = get_testcase_dest_list(testcase_list, testcase_collect_path)
+
+ data = {
+ 'subsystem_name': subsystem_name,
+ 'testcase_src_list': testcase_list,
+ 'testcase_dest_list': dest_list
+ }
+
+ # write the subsystem ebpf testcase config file.
+ write_json_file(config_file, data)
+
+
+def main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--subsystem-name', help='', required=True)
+ parser.add_argument('--subsystem-ebpf-testcase-config-file',
+ help='',
+ required=True)
+ parser.add_argument('--subsystem-testcase-list', nargs='+',
+ help='',
+ required=True)
+ parser.add_argument('--subsystem-testcase-collect-path',
+ help='',
+ required=True)
+ args = parser.parse_args()
+
+ subsystem_name = args.subsystem_name
+ subsystem_testcase_config_file = args.subsystem_ebpf_testcase_config_file
+ subsystem_testcase_list = args.subsystem_testcase_list
+ subsystem_testcase_collect_path = args.subsystem_testcase_collect_path
+
+ write_testcase_config_file(subsystem_name,
+ subsystem_testcase_config_file,
+ subsystem_testcase_list,
+ subsystem_testcase_collect_path)
+
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/dsoftbus/build/scripts/gen_summary_ebpf_testcase_config.py b/dsoftbus/build/scripts/gen_summary_ebpf_testcase_config.py
new file mode 100755
index 0000000000000000000000000000000000000000..6e472b07d9181631b44bf20e5fd5e61efd496d3e
--- /dev/null
+++ b/dsoftbus/build/scripts/gen_summary_ebpf_testcase_config.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+
+Usage: gen_summary_ebpf_testcase_config.py \
+ --ebpf-testcase-path out/xxx/ebpf_testcase \
+ --ebpf-summary-config-file xx/xxx.json \
+
+Generate the summary ebpf testcase config files.
+
+"""
+import argparse
+import os
+import os.path
+import sys
+import json
+
+
+def summary_subsystem_config_file(testcase_dir, summary_file):
+ if not testcase_dir.strip():
+ return
+
+ subsystem_list = []
+ for root, dirs, files in os.walk(testcase_dir):
+ for name in files:
+ if name.endswith('.json'):
+ subsystem_list.append(os.path.join(root, name))
+
+ # load the subsystem testcase info
+ context = []
+ for file_path in subsystem_list:
+ try:
+ with open(file_path, 'r') as infile:
+ file_data = json.load(infile)
+ context.append(file_data)
+ except OSError as err:
+ raise err
+
+ # write the summary file.json
+ try:
+ with open(summary_file, 'w') as out_file:
+ json.dump(context, out_file, sort_keys=True, indent=2)
+ except OSError as err:
+ raise err
+
+
+def main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--ebpf-testcase-path', help='', required=True)
+ parser.add_argument('--ebpf-summary-config-file', help='', required=True)
+ args = parser.parse_args()
+
+ testcase_dir = args.ebpf_testcase_path
+ summary_file = args.ebpf_summary_config_file
+
+ if os.path.exists(summary_file):
+ os.remove(summary_file)
+
+ summary_subsystem_config_file(testcase_dir, summary_file)
+
+
+if __name__ == "__main__":
+ main(sys.argv)
diff --git a/dsoftbus/build/scripts/get_all_files.py b/dsoftbus/build/scripts/get_all_files.py
new file mode 100755
index 0000000000000000000000000000000000000000..0e027fb59909c7d15e3ac4508c67a4e8ca0ab631
--- /dev/null
+++ b/dsoftbus/build/scripts/get_all_files.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from scripts.util.file_utils import write_file
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--base-dir', required=True)
+ parser.add_argument('--sub-dir-list', nargs='+', required=False)
+ parser.add_argument('--result-file', required=False)
+ args = parser.parse_args()
+
+ file_list = []
+ dir_list = []
+ if args.sub_dir_list:
+ for sub_dir in args.sub_dir_list:
+ dir_list.append(os.path.join(args.base_dir, sub_dir))
+ else:
+ dir_list.append(args.base_dir)
+ for _dir in dir_list:
+ if not os.path.exists(_dir):
+ continue
+ for root, _, files in os.walk(_dir):
+ for _file in files:
+ file_list.append(os.path.realpath(os.path.join(root, _file)))
+
+ if args.result_file:
+ write_file(args.result_file, "\n".join(file_list))
+ else:
+ for _file in file_list:
+ print(_file)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/scripts/get_warnings.py b/dsoftbus/build/scripts/get_warnings.py
new file mode 100755
index 0000000000000000000000000000000000000000..e22e703e4db6e40f21745c1552f1df32fc9592e8
--- /dev/null
+++ b/dsoftbus/build/scripts/get_warnings.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+#-*- coding: UTF-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+
+Usage: get_warnings.py --build-log-file out/phone-release/build.log \
+ --warning-out-file out/phone-release/warning_list.txt
+
+Extract warning lines from the build log and generate a deduplicated warning list.
+
+"""
+
+import argparse
+import os
+import sys
+import re
+import subprocess
+
+
+def _do_uniq(infile, outfile):
+ subprocess.call(['sort', '-u', infile, '-o', outfile], shell=False)
+
+
+def _pick_line(line_info):
+ result = False
+ parser_keys = [
+ ": Warning", ": warning", "warning:", "Warning:", "WARNING:"
+ ]
+ for _key in parser_keys:
+ if len(re.findall(_key, line_info, re.S)) >= 1:
+ result = True
+ break
+ return result
+
+
+def _parse(in_name, out_name, prj_dir):
+ if not os.path.exists(in_name):
+ print("warning: build log file {} is not exists.".format(in_name))
+ return False
+
+ with open(in_name, "r", encoding='utf-8', errors='ignore') as in_fd:
+ os.makedirs(os.path.dirname(out_name), exist_ok=True)
+ with open(out_name, "w") as out_fd:
+ while True:
+ line_info = in_fd.readline()
+ line_info = line_info.replace("\r", "")
+ if line_info == "":
+ break
+ # Work around non-standard warning paths printed during kernel compilation --begin
+ if (line_info.find("kernel/linux-") >
+ 1) and (": warning:" in line_info):
+ line_info = line_info[line_info.find("kernel/linux-"):]
+ # Work around non-standard warning paths printed during kernel compilation --end
+ if _pick_line(line_info):
+ while True:
+ if line_info.startswith("../"):
+ line_info = line_info[3:]
+ elif line_info.startswith("./"):
+ line_info = line_info[2:]
+ elif line_info.startswith("\""):
+ line_info = line_info[1:]
+ elif line_info.startswith(":"):
+ line_info = line_info[1:]
+ elif line_info.startswith(" "):
+ line_info = line_info[1:]
+ else:
+ break
+ # convert to a path relative to the project root
+ templist = line_info.split(":")
+ templist[0] = os.path.abspath(templist[0])
+ templist[0] = templist[0].replace(prj_dir + "/",
+ "").strip()
+ temp = ":"
+ line_info = temp.join(templist)
+
+ out_fd.write(line_info)
+ out_fd.write("\r\n")
+ return True
+
+
+def _get_warn(log_file, warn_file, prj_dir):
+ if os.path.exists(warn_file):
+ os.remove(warn_file)
+ temp_out_file = os.path.join(os.path.dirname(warn_file),
+ 'temp_warning.txt')
+ result = _parse(log_file, temp_out_file, prj_dir)
+ if result:
+ _do_uniq(temp_out_file, warn_file)
+ # delete temp file
+ if os.path.exists(temp_out_file):
+ os.remove(temp_out_file)
+
+
+def main(argv):
+ """parse warning info from build log."""
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--build-log-file', help='log file', required=True)
+ parser.add_argument('--warning-out-file',
+ help='result file',
+ required=True)
+ args = parser.parse_args(argv)
+
+ log_file = args.build_log_file
+ warn_file = args.warning_out_file
+ prj_dir = os.getcwd()
+ _get_warn(log_file, warn_file, prj_dir)
+ return 0
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/dsoftbus/build/scripts/hapbuilder.py b/dsoftbus/build/scripts/hapbuilder.py
new file mode 100755
index 0000000000000000000000000000000000000000..2b87f39693cac441f1d92b446fe129fc0a4f3359
--- /dev/null
+++ b/dsoftbus/build/scripts/hapbuilder.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import optparse
+import subprocess
+import sys
+import shutil
+import os
+import tempfile
+from util import build_utils # noqa: E402
+
+
+def sign_hap(hapsigner, private_key_path, sign_algo, certificate_profile,
+ keystore_path, keystorepasswd, keyalias, certificate_file,
+ unsigned_hap_path, signed_hap_path):
+ cmd = ['java', '-jar', hapsigner, 'sign']
+ cmd.extend(['-mode', 'localjks'])
+ cmd.extend(['-signAlg', sign_algo])
+ cmd.extend(['-privatekey', private_key_path])
+ cmd.extend(['-inputFile', unsigned_hap_path])
+ cmd.extend(['-outputFile', signed_hap_path])
+ cmd.extend(['-profile', certificate_profile])
+ cmd.extend(['-keystore', keystore_path])
+ cmd.extend(['-keystorepasswd', keystorepasswd])
+ cmd.extend(['-keyaliaspasswd', keyalias])
+ cmd.extend(['-certpath', certificate_file])
+ cmd.extend(['-profileSigned', '1'])
+ child = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdout, stderr = child.communicate()
+ if child.returncode:
+ print(stdout.decode(), stderr.decode())
+ raise Exception("Failed to sign hap")
+
+
+def create_hap(options, signed_hap):
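+ # stage the profile, native libs, resources and assets into a temp dir,
+ # run the hap packing tool, then sign the result with hapsigner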
+ with build_utils.temp_dir() as package_dir, tempfile.NamedTemporaryFile(
+ suffix='.hap') as output:
+ packing_cmd = ['java', '-jar', options.hap_packing_tool]
+ packing_cmd.extend(
+ ['--mode', 'hap', '--force', 'true', '--out-path', output.name])
+
+ hap_profile_path = os.path.join(package_dir,
+ os.path.basename(options.hap_profile))
+ shutil.copy(options.hap_profile, hap_profile_path)
+ packing_cmd.extend(['--json-path', hap_profile_path])
+
+ if options.dso:
+ lib_path = os.path.join(package_dir, "lib")
+ os.mkdir(lib_path)
+ for dso in options.dso:
+ shutil.copy(dso, lib_path)
+ packing_cmd.extend(['--lib-path', lib_path])
+
+ if options.packaged_resources:
+ build_utils.extract_all(options.packaged_resources,
+ package_dir,
+ no_clobber=False)
+ packing_cmd.extend(
+ ['--index-path',
+ os.path.join(package_dir, 'resources.index')])
+ packing_cmd.extend(
+ ['--res-path',
+ os.path.join(package_dir, 'resources')])
+
+ assets_dir = os.path.join(package_dir, 'assets')
+ if options.packaged_js_assets or options.assets:
+ packing_cmd.extend(['--assets-path', assets_dir])
+
+ if options.packaged_js_assets:
+ build_utils.extract_all(options.packaged_js_assets,
+ package_dir,
+ no_clobber=False)
+ if options.assets:
+ if not os.path.exists(assets_dir):
+ os.mkdir(assets_dir)
+ for dire in options.assets:
+ shutil.copytree(
+ dire,
+ os.path.join(assets_dir, os.path.basename(dire)))
+
+ build_utils.check_output(packing_cmd)
+
+ sign_hap(options.hapsigner, options.private_key_path,
+ options.sign_algo, options.certificate_profile,
+ options.keystore_path, options.keystorepasswd,
+ options.keyalias, options.certificate_file, output.name,
+ signed_hap)
+
+
+def parse_args(args):
+ args = build_utils.expand_file_args(args)
+
+ parser = optparse.OptionParser()
+ build_utils.add_depfile_option(parser)
+ parser.add_option('--hap-path', help='path to output hap')
+ parser.add_option('--hapsigner', help='path to signer')
+ parser.add_option('--assets', help='path to assets')
+ parser.add_option('--dso',
+ action="append",
+ help='path to dynamic shared objects')
+ parser.add_option('--hap-profile', help='path to hap profile')
+ parser.add_option('--hap-packing-tool', help='path to hap packing tool')
+ parser.add_option('--private-key-path', help='path to private key')
+ parser.add_option('--sign-algo', help='signature algorithm')
+ parser.add_option('--certificate-profile',
+ help='path to certificate profile')
+ parser.add_option('--keyalias', help='keyalias')
+ parser.add_option('--keystore-path', help='path to keystore')
+ parser.add_option('--keystorepasswd', help='password of keystore')
+ parser.add_option('--certificate-file', help='path to certificate file')
+ parser.add_option('--packaged-resources',
+ help='path to packaged resources')
+ parser.add_option('--packaged-js-assets',
+ help='path to packaged js assets')
+
+ options, _ = parser.parse_args(args)
+ if options.assets:
+ options.assets = build_utils.parse_gn_list(options.assets)
+ return options
+
+
+def main(args):
+ options = parse_args(args)
+
+ inputs = [
+ options.hap_profile, options.packaged_js_assets,
+ options.packaged_resources, options.certificate_file,
+ options.keystore_path, options.certificate_profile
+ ]
+ depfiles = []
+ if options.assets:
+ for dire in options.assets:
+ depfiles += build_utils.get_all_files(dire)
+ if options.dso:
+ depfiles.extend(options.dso)
+
+ build_utils.call_and_write_depfile_if_stale(
+ lambda: create_hap(options, options.hap_path),
+ options,
+ depfile_deps=depfiles,
+ input_paths=inputs + depfiles,
+ input_strings=[
+ options.keystorepasswd, options.keyalias, options.sign_algo,
+ options.private_key_path
+ ],
+ output_paths=([options.hap_path]),
+ force=False,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/scripts/hapbuilder.pydeps b/dsoftbus/build/scripts/hapbuilder.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..23d77f8223d92e722bc84c3831858d8f28309998
--- /dev/null
+++ b/dsoftbus/build/scripts/hapbuilder.pydeps
@@ -0,0 +1,8 @@
+# Generated by running:
+# build/print_python_deps.py --root build/scripts --output build/scripts/hapbuilder.pydeps build/scripts/hapbuilder.py
+../gn_helpers.py
+hapbuilder.py
+util/__init__.py
+util/build_utils.py
+util/md5_check.py
+util/pycache.py
diff --git a/dsoftbus/build/scripts/interface_mgr.py b/dsoftbus/build/scripts/interface_mgr.py
new file mode 100755
index 0000000000000000000000000000000000000000..244daa85a9c218f6b2dc7796a1f2dde2fa6eb3eb
--- /dev/null
+++ b/dsoftbus/build/scripts/interface_mgr.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import hashlib
+import argparse
+
+
+class InterfaceMgr:
+ __max_buf = 1024 * 1024
+
+ def get_file_sha256(self, filename):
+ hash_value = None
+ if os.path.isfile(filename):
+ sha256obj = hashlib.sha256()
+ try:
+ with open(filename, 'rb') as f:
+ while True:
+ buf = f.read(self.__max_buf)
+ if not buf:
+ break
+ sha256obj.update(buf)
+ hash_value = sha256obj.hexdigest()
+ except OSError as err:
+ sys.stdout.write("read file failed. {}".format(err))
+ return ""
+ return str(hash_value)
+
+ def get_header_files(self, file_dir):
+ h_files = []
+ for path, _, files in os.walk(file_dir):
+ for file in files:
+ if file.endswith('.h'):
+ x_file = os.path.relpath(os.path.join(path, file),
+ file_dir)
+ h_files.append(x_file)
+ return h_files
+
+ def gen_sig_file_by_subsystem(self, subsystem_sdk_out_dir,
+ sig_file_gen_dir):
+ if not os.path.exists(subsystem_sdk_out_dir) or not os.path.isdir(
+ subsystem_sdk_out_dir):
+ raise Exception(
+ "subsystem sdk out dir '{}' not exist or not dir".format(
+ subsystem_sdk_out_dir))
+
+ module_list = os.listdir(subsystem_sdk_out_dir)
+ for module_name in module_list:
+ module_dir = os.path.join(subsystem_sdk_out_dir, module_name)
+ if not os.path.exists(module_dir) or not os.path.isdir(module_dir):
+ continue
+ header_files = self.get_header_files(module_dir)
+ if not header_files:
+ continue
+ check_content = []
+ for h_file in header_files:
+ file_sha256 = self.get_file_sha256(
+ os.path.join(module_dir, h_file))
+ check_content.append('{} {}'.format(h_file, file_sha256))
+
+ check_file = os.path.join(sig_file_gen_dir, module_name,
+ 'check.txt')
+ file_dir = os.path.dirname(os.path.abspath(check_file))
+ if not os.path.exists(file_dir):
+ os.makedirs(file_dir)
+
+ # sort check file content
+ check_content.sort()
+ with open(check_file, 'w') as output_file:
+ output_file.write('\n'.join(check_content))
+ output_file.flush()
+
+ def _gen_checkfile(self, check_file_dir, target_type_dir, target_type):
+ subsystem_list = os.listdir(target_type_dir)
+ for subsystem_name in subsystem_list:
+ subsystem_dir = os.path.join(target_type_dir, subsystem_name)
+ if not os.path.isdir(subsystem_dir) or subsystem_name.startswith(
+ '.'):
+ continue
+ self.gen_sig_file_by_subsystem(subsystem_dir, check_file_dir)
+
+ def gen_interface_checkfile(self, sdk_base_dir, check_file_dir):
+ if not os.path.isdir(sdk_base_dir):
+ raise Exception(
+ 'sdk base dir [{}] does not exist.'.format(sdk_base_dir))
+
+ target_type_allowlist = ['ohos-arm64']
+ target_types = os.listdir(sdk_base_dir)
+ for target_type in target_types:
+ target_type_dir = os.path.join(sdk_base_dir, target_type)
+ if not os.path.isdir(target_type_dir) or target_type.startswith(
+ '.'):
+ continue
+
+ if target_type == 'java':
+ continue
+
+ if target_type not in target_type_allowlist:
+ raise Exception('target type is incorrect.')
+
+ self._gen_checkfile(check_file_dir, target_type_dir, target_type)
+
+ def check(self, check_file_dir, subsystem_sdk_dir, subsystem_name,
+ module_name):
+ check_file = os.path.join(check_file_dir, subsystem_name, module_name,
+ 'check.txt')
+ if not os.path.isfile(check_file):
+ raise Exception(
+ '[{}:{}] interface check failed. file [{}] does not exist.'.
+ format(subsystem_name, module_name, check_file))
+
+ check_value = {}
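+ # check.txt holds one '<relative header path> <sha256>' pair per line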
+ with open(check_file, 'r') as f:
+ for line in f.readlines():
+ values = line.rstrip('\n').split(' ')
+ check_value[values[0]] = values[1]
+
+ module_dir = os.path.join(subsystem_sdk_dir, module_name)
+ header_files = self.get_header_files(module_dir)
+ if len(check_value) != len(header_files):
+ raise Exception(('[{}:{}] interface check failed. '
+ 'the number of files is different.').format(
+ subsystem_name, module_name))
+
+ for h_file in header_files:
+ file_sha256 = self.get_file_sha256(os.path.join(
+ module_dir, h_file))
+ if check_value.get(h_file) != file_sha256:
+ raise Exception(
+ '[{}:{}] interface check failed. file [{}] has changed.'.
+ format(subsystem_name, module_name, h_file))
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--generate', dest='generate', action='store_true')
+ parser.set_defaults(generate=False)
+ parser.add_argument('--sdk-base-dir', help='', required=True)
+ parser.add_argument('--check_file_dir', help='', required=True)
+ args = parser.parse_args()
+
+ if args.generate:
+ interface_mgr = InterfaceMgr()
+ interface_mgr.gen_interface_checkfile(args.sdk_base_dir,
+ args.check_file_dir)
+ else:
+ sys.stdout.write('Usage: interface_mgr.py --generate '
+ '--sdk-base-dir SDK_BASE_DIR '
+ '--check_file_dir CHECK_FILE_DIR')
+ sys.stdout.flush()
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/scripts/merge_notice.py b/dsoftbus/build/scripts/merge_notice.py
new file mode 100755
index 0000000000000000000000000000000000000000..b230aa4db5631412e24f880aaae4da0b874688c9
--- /dev/null
+++ b/dsoftbus/build/scripts/merge_notice.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+import shutil
+
+
+def _merge_txt_file(ohos_notice, a_notice, output_notice):
+ if not os.path.exists(ohos_notice):
+ print("Warning, can not find the ohos notice file: {}.".format(
+ ohos_notice))
+ return
+
+ if not os.path.exists(a_notice):
+ print("Warning, can not find the notice file: {}.".format(a_notice))
+ shutil.move(ohos_notice, a_notice)
+ return
+
+ with open(output_notice, 'a') as a_file:
+ with open(ohos_notice, 'r', errors='ignore') as _file:
+ data = _file.readlines()
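+ # drop the first line (the notice header) of each file before merging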
+ del data[0]
+ for line in data:
+ a_file.write(line)
+ with open(a_notice, 'r', errors='ignore') as _file:
+ data = _file.readlines()
+ del data[0]
+ for line in data:
+ a_file.write(line)
+
+ if os.path.exists(ohos_notice):
+ os.remove(ohos_notice)
+
+
+def main():
+ """notice file merge."""
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--ohos-notice', required=True)
+ parser.add_argument('--a-notice', required=True)
+ parser.add_argument('--output-notice', required=True)
+ args = parser.parse_args()
+
+ # merge NOTICE.txt file
+ _merge_txt_file(args.ohos_notice, args.a_notice, args.output_notice)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/scripts/ninja2trace.py b/dsoftbus/build/scripts/ninja2trace.py
new file mode 100755
index 0000000000000000000000000000000000000000..fe755e128e8b1f2645506080f20d3a644fd6281e
--- /dev/null
+++ b/dsoftbus/build/scripts/ninja2trace.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import json
+import gzip
+import shutil
+import argparse
+
+KFILESIGNATURE = "# ninja log v5\n"
+
+
+class StoringDataLine(object):
+ def __init__(self, start, end):
+ self.start = int(start)
+ self.end = int(end)
+ self.target_obj_names = []
+
+ def __str__(self):
+ return "{} {} {} ".format(self.start, self.end, self.target_obj_names)
+
+
+class NinjaToTrace(object):
+ def __init__(self):
+ self.datalist = list()
+ self.durations = list()
+
+ def parse_file(self, filename, ninja_start_time):
+ # ensure file exist
+ if not os.path.exists(filename):
+ print("file: {} not exists".format(filename))
+ return False
+ storing_data = {}
+ with open(filename, mode='r') as f:
+ firstline = f.readline()
+ if firstline != KFILESIGNATURE:
+ print("unrecognized ninja log format, we need {}".format(
+ KFILESIGNATURE))
+
+ for _, line in enumerate(f.readlines()):
+ start, end, time_stamp, name, cmdhash = line.strip().split(
+ '\t')
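+ # ninja log fields are strings; compare timestamps numerically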
+ if int(time_stamp) < int(ninja_start_time):
+ continue
+ storing_data.setdefault(cmdhash, StoringDataLine(start, end))
+ storing_data.get(cmdhash).target_obj_names.append(name)
+
+ self.datalist = sorted(storing_data.values(),
+ key=lambda line: line.start)
+ self.durations = sorted(storing_data.values(),
+ key=lambda line: line.end - line.start,
+ reverse=True)
+ return True
+
+ def save_durations(self, duration_file):
+ total_time = 0
+ with open(duration_file, 'w') as file:
+ for item in self.durations:
+ duration = item.end - item.start
+ total_time += duration
+ file.write('{}: {}\n'.format(item.target_obj_names[0],
+ duration))
+ file.write('total time: {} ms'.format(total_time))
+
+ def trans_to_trace_json(self, dest_file_name):
+ counter = CountingTheTid()
+ tracelist = list()
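+ # one Chrome trace 'complete' event ('ph': 'X') per log entry; the
+ # gzipped JSON can be loaded in chrome://tracing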
+ for storingdataline in self.datalist:
+ tracelist.append({
+ 'name': '%0s' % ', '.join(storingdataline.target_obj_names),
+ 'cat': 'targets',
+ 'ph': 'X',
+ 'ts': str(storingdataline.start * 1000),
+ 'dur': str((storingdataline.end - storingdataline.start) * 1000),
+ 'pid': str(0),
+ 'tid': str(counter.counting_the_new_tid(storingdataline)),
+ 'args': {},
+ })
+
+ if not dest_file_name.endswith('.gz'):
+ dest_file_name = dest_file_name + '.gz'
+
+ if os.path.exists(dest_file_name):
+ shutil.move(
+ dest_file_name, '%s/build.trace.%d.gz' %
+ (os.path.dirname(dest_file_name),
+ int(os.stat(dest_file_name).st_mtime)))
+
+ with gzip.open(dest_file_name, "wt") as f:
+ json.dump(tracelist, f)
+
+
+class CountingTheTid(object):
+ def __init__(self):
+ self.tids = [] # store the tid's end time
+
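+ # greedy lane allocation: reuse the first tid whose previous task
+ # ended before this one starts; otherwise open a new lane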
+ def counting_the_new_tid(self, storingdataline):
+ for i in range(len(self.tids)):
+ if self.tids[i] <= storingdataline.start:
+ self.tids[i] = storingdataline.end
+ return i # renew the endtime and return the current tid
+
+ # this start time is earlier than every lane's end time, so open a new lane
+ self.tids.append(storingdataline.end)
+ return len(self.tids) - 1 # the last index of the tids
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--ninja-log', help='path to ninja log')
+ parser.add_argument('--trace-file', help='path to build trace file')
+ parser.add_argument('--duration-file', help='path to duration file')
+ parser.add_argument(
+ '--ninja-start-time',
+ help='epoch time of "Starting ninja ..." in nanoseconds')
+
+ options = parser.parse_args()
+ myparser = NinjaToTrace()
+ if not myparser.parse_file(options.ninja_log, options.ninja_start_time):
+ print("failed to parse the ninja log")
+ return 1
+
+ myparser.trans_to_trace_json(options.trace_file)
+ myparser.save_durations(options.duration_file)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/scripts/ninja_rules_parser.py b/dsoftbus/build/scripts/ninja_rules_parser.py
new file mode 100755
index 0000000000000000000000000000000000000000..15a14877518a3563a0437b3e6cd14ba28d27e6f2
--- /dev/null
+++ b/dsoftbus/build/scripts/ninja_rules_parser.py
@@ -0,0 +1,210 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from scripts.util.file_utils import read_json_file, read_file, write_file
+
+
+def _read_subninja_build(build_dir, subninja_build_file):
+ subninja_build_file = os.path.join(build_dir, subninja_build_file)
+ if not os.path.exists(subninja_build_file):
+ raise Exception("file '{}' doesn't exist.".format(subninja_build_file))
+ subninja_build = read_file(subninja_build_file)
+ if subninja_build is None:
+ raise Exception("read file '{}' failed.".format(subninja_build_file))
+ support_lib_type = ['.a', '.so', '']
+ label_name = ''
+ label_target = ''
+ for _line in subninja_build:
+ if _line.startswith('label_name = '):
+ label_name = _line[len('label_name = '):]
+ elif _line.startswith('build '):
+ _build_info = _line.split(':')[0]
+ build_label = _build_info.split(' ')[1]
+ _, extension = os.path.splitext(build_label)
+ if extension in support_lib_type:
+ label_target = build_label
+
+ if label_target != '':
+ if label_name == '':
+ target_filename = os.path.basename(label_target)
+ label_name, _ = os.path.splitext(target_filename)
+ return {label_name: label_target}
+ return None
+
+
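+# Each stamp build label is exposed under up to three phony aliases: the bare
+# target name, its obj/ subdirectory path, and 'path$:name'.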
+def _parse_target_label(build_label_list, toolchain_name):
+ phony_targets_dict = {}
+ for build_label in build_label_list:
+ target_filename = os.path.basename(build_label)
+ label_name, _ = os.path.splitext(target_filename)
+ if label_name:
+ phony_targets_dict[label_name] = build_label
+ start_index = len('{}/obj/'.format(toolchain_name))
+ _path = os.path.dirname(build_label)[start_index:]
+ if _path:
+ phony_targets_dict[_path] = build_label
+ if label_name and _path:
+ _label_path = '{}$:{}'.format(_path, label_name)
+ phony_targets_dict[_label_path] = build_label
+ return phony_targets_dict
+
+
+def _read_toolchain_ninja(build_dir, toolchain_ninja_file, toolchain_name):
+ if not os.path.exists(toolchain_ninja_file):
+ raise Exception(
+ "file '{}' doesn't exist.".format(toolchain_ninja_file))
+ toolchain_ninja_rules = read_file(toolchain_ninja_file)
+ if toolchain_ninja_rules is None:
+ raise Exception("read file '{}' failed.".format(toolchain_ninja_file))
+
+ build_label_list = []
+ subninja_targets = {}
+ for _ninja_rule in toolchain_ninja_rules:
+ if _ninja_rule.startswith('build '):
+ _tmp = _ninja_rule.split(':')[0]
+ _label = _tmp[len('build '):]
+ if not _label.endswith('.stamp'):
+ continue
+ build_label_list.append(_label)
+ if _ninja_rule.startswith('subninja '):
+ subninja_file = _ninja_rule[len('subninja '):]
+ subninja_target_info = _read_subninja_build(
+ build_dir, subninja_file)
+ if subninja_target_info:
+ subninja_targets.update(subninja_target_info)
+ build_phony_targets = _parse_target_label(build_label_list, toolchain_name)
+ build_phony_targets.update(subninja_targets)
+ return build_phony_targets
+
+
+def _read_variants_toolchain_info(variants_toolchain_info_file):
+ if not os.path.exists(variants_toolchain_info_file):
+ raise Exception(
+ "file '{}' doesn't exist.".format(variants_toolchain_info_file))
+ variants_toolchain_info = read_json_file(variants_toolchain_info_file)
+ if variants_toolchain_info is None:
+ raise Exception(
+ "read file '{}' failed.".format(variants_toolchain_info_file))
+ platform_toolchain = variants_toolchain_info.get('platform_toolchain')
+ return platform_toolchain
+
+
+def _read_build_ninja(build_ninja_file):
+ if not os.path.exists(build_ninja_file):
+ raise Exception("file '{}' doesn't exist.".format(build_ninja_file))
+ ninja_targets = read_file(build_ninja_file)
+ if ninja_targets is None:
+ raise Exception("read file '{}' failed.".format(build_ninja_file))
+ return ninja_targets
+
+
+def generate_phony_targets(build_dir, toolchain_ninja_file, platform,
+ toolchain_name, default_targets_name):
+ build_phony_targets = _read_toolchain_ninja(build_dir,
+ toolchain_ninja_file,
+ toolchain_name)
+ targets_list = []
+ for key, build_label in build_phony_targets.items():
+ targets_list.append('build {}/{}: phony {}'.format(
+ platform, key, build_label))
+
+ _diff_targets = set(default_targets_name).difference(
+ set(build_phony_targets.keys()))
+ for _diff_target in _diff_targets:
+ targets_list.append('build {}/{}: phony {}'.format(
+ platform, _diff_target, _diff_target))
+ build_file = os.path.join(os.path.dirname(toolchain_ninja_file),
+ '{}_build.ninja'.format(platform))
+ write_file(build_file, '{}\n\n'.format('\n'.join(targets_list)))
+ return build_file
+
+
+def _update_build_ninja(build_dir, include_files):
+ try:
+ ninja_build_file = os.path.join(build_dir, 'build.ninja')
+ if not os.path.exists(ninja_build_file):
+ raise Exception(
+ "file '{}' doesn't exist.".format(ninja_build_file))
+ with open(ninja_build_file, 'a+') as _file:
+ data = []
+ # 'a+' positions the file at EOF; rewind before reading
+ _file.seek(0)
+ for line in _file.readlines():
+ _line = line.rstrip('\n')
+ if _line.startswith('subninja '):
+ data.append(_line)
+ for include_file in include_files:
+ include_info = 'subninja {}'.format(
+ os.path.relpath(include_file, build_dir))
+ if include_info in data:
+ continue
+ _file.write('{}\n'.format(include_info))
+ _file.flush()
+ except:
+ raise
+
+
+def update(build_dir, variants_toolchain_info_file):
+ variants_toolchain_info_file = os.path.join(build_dir,
+ variants_toolchain_info_file)
+ platform_toolchain = _read_variants_toolchain_info(
+ variants_toolchain_info_file)
+
+ ninja_build_file = os.path.join(build_dir, 'build.ninja')
+ default_ninja_targets = _read_build_ninja(ninja_build_file)
+ default_targets_name = []
+ for _ninja_target in default_ninja_targets:
+ if not _ninja_target.startswith('build '):
+ continue
+ _ninja_target = _ninja_target.split(': ')[0]
+ default_targets_name.append(_ninja_target[len('build '):])
+
+ include_files = []
+ for platform, toolchain_label in platform_toolchain.items():
+ if platform == 'phone':
+ continue
+ toolchain_name = toolchain_label.split(':')[1]
+ toolchain_ninja_file = os.path.join(build_dir, toolchain_name,
+ 'toolchain.ninja')
+ if not os.path.exists(toolchain_ninja_file):
+ continue
+ _build_file = generate_phony_targets(build_dir, toolchain_ninja_file,
+ platform, toolchain_name,
+ default_targets_name)
+ include_files.append(_build_file)
+ _update_build_ninja(build_dir, include_files)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--source-root-dir', required=True)
+ parser.add_argument('--root-build-dir', required=True)
+ parser.add_argument('--variants-toolchain-info-file', required=True)
+ args = parser.parse_args()
+
+ source_root_dir = args.source_root_dir
+ if not os.path.exists(os.path.join(source_root_dir, '.gn')):
+ print('source root dir incorrect.')
+ return 1
+ build_dir = os.path.join(source_root_dir, args.root_build_dir)
+ update(build_dir, args.variants_toolchain_info_file)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/scripts/summary_ccache_hitrate.py b/dsoftbus/build/scripts/summary_ccache_hitrate.py
new file mode 100755
index 0000000000000000000000000000000000000000..f797cd587796ab6156299d40b4a28ee70dcab8d4
--- /dev/null
+++ b/dsoftbus/build/scripts/summary_ccache_hitrate.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+#-*- coding: UTF-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import subprocess
+
+
+def summary_ccache_new(ccache_log):
+ hit_dir_num = 0
+ hit_pre_num = 0
+ mis_num = 0
+ hit_rate = 0
+ mis_rate = 0
+ hit_dir_str = "Result: cache hit (direct)"
+ hit_pre_str = "Result: cache hit (preprocessed)"
+ mis_str = "Result: cache miss"
+ if os.path.exists(ccache_log):
+ cmd = "grep -c \'{}\' {}".format(hit_dir_str, ccache_log)
+ hit_dir_num = int(
+ subprocess.Popen(cmd, shell=True,
+ stdout=subprocess.PIPE).communicate()[0])
+ cmd = "grep -c \'{}\' {}".format(hit_pre_str, ccache_log)
+ hit_pre_num = int(
+ subprocess.Popen(cmd, shell=True,
+ stdout=subprocess.PIPE).communicate()[0])
+ cmd = "grep -c \'{}\' {}".format(mis_str, ccache_log)
+ mis_num = int(
+ subprocess.Popen(cmd, shell=True,
+ stdout=subprocess.PIPE).communicate()[0])
+ sum_ccache = hit_dir_num + hit_pre_num + mis_num
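+ # hit rate = (direct hits + preprocessed hits) / total ccache results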
+ if sum_ccache != 0:
+ hit_rate = (float(hit_dir_num) + float(hit_pre_num)) / float(sum_ccache)
+ mis_rate = float(mis_num) / float(sum_ccache)
+ return hit_rate, mis_rate, hit_dir_num, hit_pre_num, mis_num
+
+
+def main():
+ if len(sys.argv) < 2:
+ print("Error, please input the ccache log file path.")
+ sys.exit(-1)
+
+ ccache_log = sys.argv[1]
+ hit_rate = 0
+ mis_rate = 0
+ hit_dir_num = 0
+ hit_pre_num = 0
+ mis_num = 0
+ if os.path.exists(ccache_log):
+ hit_rate, mis_rate, hit_dir_num, hit_pre_num, mis_num = summary_ccache_new(
+ ccache_log)
+
+ print("---------------------------------------------")
+ print("ccache summary:")
+ print("cache hit (direct) : " + str(hit_dir_num))
+ print("cache hit (preprocessed) : " + str(hit_pre_num))
+ print("cache miss : " + str(mis_num))
+ print("hit rate: %.2f%% " % (hit_rate * 100))
+ print("mis rate: %.2f%% " % (mis_rate * 100))
+ print("---------------------------------------------")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/dsoftbus/build/scripts/tools_checker.py b/dsoftbus/build/scripts/tools_checker.py
new file mode 100644
index 0000000000000000000000000000000000000000..0889ed3fb587e21790b8e317fab59d1dbb136622
--- /dev/null
+++ b/dsoftbus/build/scripts/tools_checker.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import subprocess
+
+
+def run_command(cmd, verbose=None):
+ if verbose:
+ print("Running: {}".format(' '.join(cmd)))
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ output, _ = p.communicate()
+ if verbose:
+ print(output.decode().rstrip())
+ return output, p.returncode
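+# Example (illustrative):
+#   output, returncode = run_command(['dpkg', '-s', 'git'], verbose=True)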
+
+
+def package_installed(pkg_name):
+ cmd = ['dpkg', '-s', pkg_name]
+ _, r = run_command(cmd)
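+    # 'dpkg -s' exits with a non-zero code when the package is not
+    # installed, so a truthy return value here means the package is missing.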
+ return r
+
+
+def check_build_required_packages():
+    _build_package_list = [
+        'binutils', 'flex', 'bison', 'git', 'build-essential', 'zip', 'curl',
+        'unzip', 'm4', 'wget', 'perl', 'bc'
+    ]
+    for pkg in _build_package_list:
+        if package_installed(pkg):
+            print("\033[33m {} is not installed. Please install it.\033[0m".
+                  format(pkg))
+            sys.exit(1)
+
+
+def check_os_version():
+    available_os = ('Ubuntu', )
+ available_releases = ('18.04', '20.04')
+ _os_info, _returncode = run_command(['cat', '/etc/issue'])
+ if _returncode != 0:
+ return -1
+
+ _os_info = _os_info.decode().rstrip().split()
+ host_os = _os_info[0]
+ host_version = _os_info[1]
+ if host_os not in available_os:
+ print("\033[33m OS {} is not supported for ohos build.\033[0m".format(
+ host_os))
+ return -1
+ version_available = False
+ for _version in available_releases:
+ if host_version == _version or host_version.startswith(_version):
+ version_available = True
+ break
+ if not version_available:
+ print("\033[33m OS version {} is not supported for ohos build.\033[0m".
+ format(host_version))
+        print("\033[33m Available OS versions are {}.\033[0m".format(
+            ', '.join(available_releases)))
+ return -1
+ return 0
+
+
+def main():
+    check_result = check_os_version()
+    if check_result != 0:
+        return check_result
+
+    check_build_required_packages()
+    return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/scripts/util/__init__.py b/dsoftbus/build/scripts/util/__init__.py
new file mode 100755
index 0000000000000000000000000000000000000000..d7b3a083706fb60581a81c2a917e927139f61f7f
--- /dev/null
+++ b/dsoftbus/build/scripts/util/__init__.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/dsoftbus/build/scripts/util/build_utils.py b/dsoftbus/build/scripts/util/build_utils.py
new file mode 100755
index 0000000000000000000000000000000000000000..f8f9cdac29b15441f7c1bcd9d6adbc2a7e407acc
--- /dev/null
+++ b/dsoftbus/build/scripts/util/build_utils.py
@@ -0,0 +1,732 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Contains common helpers for GN action()s."""
+
+import collections
+import contextlib
+import filecmp
+import fnmatch
+import json
+import os
+import pipes
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tempfile
+import zipfile
+import optparse
+
+# Any new non-system import must be added to:
+
+# Some clients do not add //build/scripts/util to PYTHONPATH.
+from . import md5_check # pylint: disable=relative-import
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+import gn_helpers
+
+# Definition copied from pylib/constants/__init__.py to avoid adding
+# a dependency on pylib.
+DIR_SOURCE_ROOT = os.environ.get(
+ 'CHECKOUT_SOURCE_ROOT',
+ os.path.abspath(
+ os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
+ os.pardir, os.pardir)))
+
+HERMETIC_TIMESTAMP = (2001, 1, 1, 0, 0, 0)
+_HERMETIC_FILE_ATTR = (0o644 << 16)
+
+
+@contextlib.contextmanager
+def temp_dir():
+ dirname = tempfile.mkdtemp()
+ try:
+ yield dirname
+ finally:
+ shutil.rmtree(dirname)
+
+
+def make_directory(dir_path):
+ try:
+ os.makedirs(dir_path)
+ except OSError:
+ pass
+
+
+def delete_directory(dir_path):
+ if os.path.exists(dir_path):
+ shutil.rmtree(dir_path)
+
+
+def touch(path, fail_if_missing=False):
+ if fail_if_missing and not os.path.exists(path):
+ raise Exception(path + ' doesn\'t exist.')
+
+ make_directory(os.path.dirname(path))
+ with open(path, 'a'):
+ os.utime(path, None)
+
+
+def find_in_directory(directory, filename_filter):
+ files = []
+ for root, _dirnames, filenames in os.walk(directory):
+ matched_files = fnmatch.filter(filenames, filename_filter)
+ files.extend((os.path.join(root, f) for f in matched_files))
+ return files
+
+
+def read_build_vars(path):
+ """Parses a build_vars.txt into a dict."""
+ with open(path) as f:
+ return dict(l.rstrip().split('=', 1) for l in f)
+
+
+def parse_gn_list(gn_string):
+ """Converts a command-line parameter into a list.
+
+    If the input starts with a '[' it is assumed to be a GN-formatted list
+    and is parsed accordingly; an empty input yields an empty list.
+    Otherwise, the parameter is treated as a single raw string (not
+    GN-formatted, in that it is not assumed to have literal quotes that must
+    be removed) and a list containing that string is returned.
+
+ The common use for this behavior is in the ohos build where things can
+ take lists of @FileArg references that are expanded via expand_file_args.
+ """
+ if gn_string.startswith('['):
+ parser = gn_helpers.GNValueParser(gn_string)
+ return parser.ParseList()
+ if len(gn_string):
+ return [gn_string]
+ return []
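+# Examples (illustrative):
+#   parse_gn_list('["a", "b"]')  ->  ['a', 'b']
+#   parse_gn_list('single')      ->  ['single']
+#   parse_gn_list('')            ->  []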
+
+
+def parse_and_flatten_gn_lists(gn_lists):
+ ret = []
+ for arg in gn_lists:
+ ret.extend(parse_gn_list(arg))
+ return ret
+
+
+def check_options(options, parser, required=None):
+ if not required:
+ return
+ for option_name in required:
+ if getattr(options, option_name) is None:
+ parser.error('--%s is required' % option_name.replace('_', '-'))
+
+
+def write_json(obj, path, only_if_changed=False):
+ old_dump = None
+ if os.path.exists(path):
+ with open(path, 'r') as oldfile:
+ old_dump = oldfile.read()
+
+ new_dump = json.dumps(obj,
+ sort_keys=True,
+ indent=2,
+ separators=(',', ': '))
+
+ if not only_if_changed or old_dump != new_dump:
+ with open(path, 'w') as outfile:
+ outfile.write(new_dump)
+
+
+@contextlib.contextmanager
+def atomic_output(path, only_if_changed=True):
+ """Helper to prevent half-written outputs.
+
+ Args:
+ path: Path to the final output file, which will be written atomically.
+ only_if_changed: If True (the default), do not touch the filesystem
+ if the content has not changed.
+ Returns:
+ A python context manager that yields a NamedTemporaryFile instance
+ that must be used by clients to write the data to. On exit, the
+ manager will try to replace the final output file with the
+ temporary one if necessary. The temporary file is always destroyed
+ on exit.
+ Example:
+ with build_utils.atomic_output(output_path) as tmp_file:
+ subprocess.check_call(['prog', '--output', tmp_file.name])
+ """
+ # Create in same directory to ensure same filesystem when moving.
+ with tempfile.NamedTemporaryFile(suffix=os.path.basename(path),
+ dir=os.path.dirname(path),
+ delete=False) as f:
+ try:
+ # Change tempfile permission to 664
+ os.fchmod(f.fileno(), 0o664)
+ yield f
+
+ # file should be closed before comparison/move.
+ f.close()
+ if not (only_if_changed and os.path.exists(path)
+ and filecmp.cmp(f.name, path)):
+ shutil.move(f.name, path)
+ finally:
+ if os.path.exists(f.name):
+ os.unlink(f.name)
+
+
+class called_process_error(Exception):
+ """This exception is raised when the process run by check_output
+ exits with a non-zero exit code."""
+ def __init__(self, cwd, args, output):
+ super(called_process_error, self).__init__()
+ self.cwd = cwd
+ self.args = args
+ if isinstance(output, bytes):
+ self.output = output.decode()
+ else:
+ self.output = output
+
+ def __str__(self):
+ # A user should be able to simply copy and paste the command that failed
+ # into their shell.
+ copyable_command = '( cd {}; {} )'.format(
+ os.path.abspath(self.cwd), ' '.join(map(pipes.quote, self.args)))
+ return 'Command failed: {}\n{}'.format(copyable_command, self.output)
+
+
+def filter_lines(output, filter_string):
+ """Output filter from build_utils.check_output.
+
+ Args:
+ output: Executable output as from build_utils.check_output.
+ filter_string: An RE string that will filter (remove) matching
+ lines from |output|.
+
+ Returns:
+ The filtered output, as a single string.
+ """
+ re_filter = re.compile(filter_string)
+ return '\n'.join(line for line in output.splitlines()
+ if not re_filter.search(line))
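+# Example (illustrative):
+#   filter_lines('ok\nwarning: x\ndone', 'warning')  ->  'ok\ndone'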
+
+
+# This can be used like subprocess.check_output() in most cases. When the
+# command fails, the output highlights the failure more clearly.
+# If the command fails, raises a build_utils.called_process_error.
+def check_output(args,
+ cwd=None,
+ env=None,
+ print_stdout=False,
+ print_stderr=True,
+ stdout_filter=None,
+ stderr_filter=None,
+ fail_func=lambda returncode, stderr: returncode != 0):
+ if not cwd:
+ cwd = os.getcwd()
+
+ child = subprocess.Popen(args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=cwd,
+ env=env)
+ stdout, stderr = child.communicate()
+
+ if stdout_filter is not None:
+ stdout = stdout_filter(stdout)
+
+ if stderr_filter is not None:
+ stderr = stderr_filter(stderr)
+ if isinstance(stdout, bytes):
+ stdout = stdout.decode()
+ if isinstance(stderr, bytes):
+ stderr = stderr.decode()
+
+ if fail_func(child.returncode, stderr):
+ raise called_process_error(cwd, args, stdout + stderr)
+
+ if print_stdout:
+ if isinstance(stdout, bytes):
+ stdout = stdout.decode()
+ if stdout:
+ sys.stdout.write(stdout)
+ if print_stderr:
+ if isinstance(stderr, bytes):
+ stderr = stderr.decode()
+ if stderr:
+ sys.stderr.write(stderr)
+ return stdout
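+# Example (illustrative): raises called_process_error if gn exits non-zero.
+#   out = check_output(['gn', 'gen', 'out'], print_stdout=True)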
+
+
+def get_modified_time(path):
+ # For a symlink, the modified time should be the greater of the link's
+ # modified time and the modified time of the target.
+ return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)
+
+
+def is_time_stale(output, inputs):
+ if not os.path.exists(output):
+ return True
+
+ output_time = get_modified_time(output)
+ for i in inputs:
+ if get_modified_time(i) > output_time:
+ return True
+ return False
+
+
+def _check_zip_path(name):
+ if os.path.normpath(name) != name:
+ raise Exception('Non-canonical zip path: %s' % name)
+ if os.path.isabs(name):
+ raise Exception('Absolute zip path: %s' % name)
+
+
+def _is_symlink(zip_file, name):
+ zi = zip_file.getinfo(name)
+
+ # The two high-order bytes of ZipInfo.external_attr represent
+ # UNIX permissions and file type bits.
+ return stat.S_ISLNK(zi.external_attr >> 16)
+
+
+def extract_all(zip_path,
+ path=None,
+ no_clobber=True,
+ pattern=None,
+ predicate=None):
+ if path is None:
+ path = os.getcwd()
+ elif not os.path.exists(path):
+ make_directory(path)
+
+ if not zipfile.is_zipfile(zip_path):
+ raise Exception('Invalid zip file: %s' % zip_path)
+
+ extracted = []
+ with zipfile.ZipFile(zip_path) as z:
+ for name in z.namelist():
+ if name.endswith('/'):
+ make_directory(os.path.join(path, name))
+ continue
+ if pattern is not None:
+ if not fnmatch.fnmatch(name, pattern):
+ continue
+ if predicate and not predicate(name):
+ continue
+ _check_zip_path(name)
+ if no_clobber:
+ output_path = os.path.join(path, name)
+ if os.path.exists(output_path):
+ raise Exception('Path already exists from zip: %s %s %s' %
+ (zip_path, name, output_path))
+ if _is_symlink(z, name):
+ dest = os.path.join(path, name)
+ make_directory(os.path.dirname(dest))
+ os.symlink(z.read(name), dest)
+ extracted.append(dest)
+ else:
+ z.extract(name, path)
+ extracted.append(os.path.join(path, name))
+
+ return extracted
+
+
+def add_to_zip_hermetic(zip_file,
+ zip_path,
+ src_path=None,
+ data=None,
+ compress=None):
+ """Adds a file to the given ZipFile with a hard-coded modified time.
+
+ Args:
+ zip_file: ZipFile instance to add the file to.
+ zip_path: Destination path within the zip file.
+ src_path: Path of the source file. Mutually exclusive with |data|.
+ data: File data as a string.
+ compress: Whether to enable compression. Default is taken from ZipFile
+ constructor.
+ """
+ assert (src_path is None) != (data is None), (
+ '|src_path| and |data| are mutually exclusive.')
+ _check_zip_path(zip_path)
+ zipinfo = zipfile.ZipInfo(filename=zip_path, date_time=HERMETIC_TIMESTAMP)
+ zipinfo.external_attr = _HERMETIC_FILE_ATTR
+
+ if src_path and os.path.islink(src_path):
+ zipinfo.filename = zip_path
+ zipinfo.external_attr |= stat.S_IFLNK << 16 # mark as a symlink
+ zip_file.writestr(zipinfo, os.readlink(src_path))
+ return
+
+    # We want to use _HERMETIC_FILE_ATTR, so manually set
+    # the few attr bits we care about.
+ if src_path:
+ st = os.stat(src_path)
+ for mode in (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH):
+ if st.st_mode & mode:
+ zipinfo.external_attr |= mode << 16
+
+ if src_path:
+ with open(src_path, 'rb') as f:
+ data = f.read()
+
+ # zipfile will deflate even when it makes the file bigger. To avoid
+ # growing files, disable compression at an arbitrary cut off point.
+ if len(data) < 16:
+ compress = False
+
+ # None converts to ZIP_STORED, when passed explicitly rather than the
+ # default passed to the ZipFile constructor.
+ compress_type = zip_file.compression
+ if compress is not None:
+ compress_type = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
+ zip_file.writestr(zipinfo, data, compress_type)
+
+
+def do_zip(inputs,
+ output,
+ base_dir=None,
+ compress_fn=None,
+ zip_prefix_path=None):
+ """Creates a zip file from a list of files.
+
+ Args:
+ inputs: A list of paths to zip, or a list of (zip_path, fs_path) tuples.
+ output: Destination .zip file.
+ base_dir: Prefix to strip from inputs.
+ compress_fn: Applied to each input to determine whether or not to compress.
+ By default, items will be |zipfile.ZIP_STORED|.
+ zip_prefix_path: Path prepended to file path in zip file.
+ """
+ input_tuples = []
+ for tup in inputs:
+ if isinstance(tup, str):
+ tup = (os.path.relpath(tup, base_dir), tup)
+ input_tuples.append(tup)
+
+ # Sort by zip path to ensure stable zip ordering.
+ input_tuples.sort(key=lambda tup: tup[0])
+ with zipfile.ZipFile(output, 'w') as outfile:
+ for zip_path, fs_path in input_tuples:
+ if zip_prefix_path:
+ zip_path = os.path.join(zip_prefix_path, zip_path)
+ compress = compress_fn(zip_path) if compress_fn else None
+ add_to_zip_hermetic(outfile,
+ zip_path,
+ src_path=fs_path,
+ compress=compress)
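+# Example (illustrative): store res/a.txt and res/b.txt as a.txt and b.txt:
+#   do_zip(['res/a.txt', 'res/b.txt'], 'out.zip', base_dir='res')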
+
+
+def zip_dir(output, base_dir, compress_fn=None, zip_prefix_path=None):
+ """Creates a zip file from a directory."""
+ inputs = []
+ for root, _, files in os.walk(base_dir):
+ for f in files:
+ inputs.append(os.path.join(root, f))
+
+ with atomic_output(output) as f:
+ do_zip(inputs,
+ f,
+ base_dir,
+ compress_fn=compress_fn,
+ zip_prefix_path=zip_prefix_path)
+
+
+def matches_glob(path, filters):
+ """Returns whether the given path matches any of the given glob patterns."""
+ return filters and any(fnmatch.fnmatch(path, f) for f in filters)
+
+
+def _strip_dst_name(dst_name, options):
+    # Strip specific directories and files if options is not None.
+ if options and options.stripFile:
+ for f in options.stripFile:
+ if fnmatch.fnmatch(dst_name, '*/' + f):
+ return True
+ if options and options.stripDir:
+ for d in options.stripDir:
+ if fnmatch.fnmatch(dst_name, d + '/*'):
+ return True
+ return False
+
+
+def merge_zips(output, input_zips, path_transform=None, merge_args=None):
+ """Combines all files from |input_zips| into |output|.
+
+ Args:
+ output: Path or ZipFile instance to add files to.
+ input_zips: Iterable of paths to zip files to merge.
+    path_transform: Called for each entry path. Returns a new path, or None
+      to skip the file.
+    merge_args: Optional '--stripDir'/'--stripFile' arguments; matching
+      directories/files are dropped while merging.
+    """
+ options = None
+ if merge_args:
+ parser = optparse.OptionParser()
+ parser.add_option('--stripDir',
+ action='append',
+ help='strip specific directory')
+ parser.add_option('--stripFile',
+ action='append',
+ help='strip specific file.')
+
+ args = expand_file_args(merge_args)
+ options, _ = parser.parse_args(args)
+
+ path_transform = path_transform or (lambda p: p)
+ added_names = set()
+
+ output_is_already_open = not isinstance(output, str)
+ if output_is_already_open:
+ assert isinstance(output, zipfile.ZipFile)
+ out_zip = output
+ else:
+ out_zip = zipfile.ZipFile(output, 'w')
+
+ try:
+ for in_file in input_zips:
+ with zipfile.ZipFile(in_file, 'r') as in_zip:
+ # ijar creates zips with null CRCs.
+ in_zip._expected_crc = None
+ for info in in_zip.infolist():
+ # Ignore directories.
+ if info.filename[-1] == '/':
+ continue
+ dst_name = path_transform(info.filename)
+ if not dst_name:
+ continue
+ if _strip_dst_name(dst_name, options):
+ continue
+ already_added = dst_name in added_names
+ if not already_added:
+ add_to_zip_hermetic(
+ out_zip,
+ dst_name,
+ data=in_zip.read(info),
+ compress=info.compress_type != zipfile.ZIP_STORED)
+ added_names.add(dst_name)
+ finally:
+ if not output_is_already_open:
+ out_zip.close()
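+# Example (illustrative): merge two archives, dropping entries under tests/:
+#   merge_zips('all.zip', ['a.zip', 'b.zip'],
+#              merge_args=['--stripDir', 'tests'])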
+
+
+def get_sorted_transitive_dependencies(top, deps_func):
+ """Gets the list of all transitive dependencies in sorted order.
+
+ There should be no cycles in the dependency graph (crashes if cycles exist).
+
+ Args:
+ top: A list of the top level nodes
+ deps_func: A function that takes a node and returns a list of its direct
+ dependencies.
+ Returns:
+ A list of all transitive dependencies of nodes in top, in order (a node
+ will appear in the list at a higher index than all of its dependencies).
+ """
+ # Find all deps depth-first, maintaining original order in the case of ties.
+ deps_map = collections.OrderedDict()
+
+ def discover(nodes):
+ for node in nodes:
+ if node in deps_map:
+ continue
+ deps = deps_func(node)
+ discover(deps)
+ deps_map[node] = deps
+
+ discover(top)
+ return list(deps_map.keys())
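+# Example (illustrative): with deps = {'a': ['b'], 'b': ['c'], 'c': []},
+# get_sorted_transitive_dependencies(['a'], deps.get) returns
+# ['c', 'b', 'a'], so every node appears after its dependencies.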
+
+
+def _compute_python_dependencies():
+ """Gets the paths of imported non-system python modules.
+
+    A path is assumed to be a "system" import if it is outside of
+    DIR_SOURCE_ROOT. The paths will be relative to the current directory.
+ """
+ _force_lazy_modules_to_load()
+ module_paths = (m.__file__ for m in sys.modules.values()
+ if m is not None and hasattr(m, '__file__') and m.__file__)
+ abs_module_paths = list(map(os.path.abspath, module_paths))
+
+ assert os.path.isabs(DIR_SOURCE_ROOT)
+ non_system_module_paths = [
+ p for p in abs_module_paths if p.startswith(DIR_SOURCE_ROOT)
+ ]
+
+ def convert_pyc_to_py(s):
+ if s.endswith('.pyc'):
+ return s[:-1]
+ return s
+
+ non_system_module_paths = list(
+ map(convert_pyc_to_py, non_system_module_paths))
+ non_system_module_paths = list(
+ map(os.path.relpath, non_system_module_paths))
+ return sorted(set(non_system_module_paths))
+
+
+def _force_lazy_modules_to_load():
+ """Forces any lazily imported modules to fully load themselves.
+
+ Inspecting the modules' __file__ attribute causes lazily imported modules
+ (e.g. from email) to get fully imported and update sys.modules. Iterate
+ over the values until sys.modules stabilizes so that no modules are missed.
+ """
+ while True:
+ num_modules_before = len(list(sys.modules.keys()))
+ for m in list(sys.modules.values()):
+ if m is not None and hasattr(m, '__file__'):
+ _ = m.__file__
+ num_modules_after = len(list(sys.modules.keys()))
+ if num_modules_before == num_modules_after:
+ break
+
+
+def add_depfile_option(parser):
+ if hasattr(parser, 'add_option'):
+ func = parser.add_option
+ else:
+ func = parser.add_argument
+ func('--depfile', help='Path to depfile (refer to `gn help depfile`)')
+
+
+def write_depfile(depfile_path, first_gn_output, inputs=None, add_pydeps=True):
+ assert depfile_path != first_gn_output # http://crbug.com/646165
+ inputs = inputs or []
+ if add_pydeps:
+ inputs = _compute_python_dependencies() + inputs
+ inputs = sorted(inputs)
+ make_directory(os.path.dirname(depfile_path))
+ # Ninja does not support multiple outputs in depfiles.
+ with open(depfile_path, 'w') as depfile:
+ depfile.write(first_gn_output.replace(' ', '\\ '))
+ depfile.write(': ')
+ depfile.write(' '.join(i.replace(' ', '\\ ') for i in inputs))
+ depfile.write('\n')
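+# Example (illustrative): write_depfile('out/foo.d', 'out/foo',
+# ['a.py', 'b.py'], add_pydeps=False) writes "out/foo: a.py b.py".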
+
+
+def expand_file_args(args):
+ """Replaces file-arg placeholders in args.
+
+ These placeholders have the form:
+ @FileArg(filename:key1:key2:...:keyn)
+
+ The value of such a placeholder is calculated by reading 'filename' as json.
+ And then extracting the value at [key1][key2]...[keyn].
+
+ Note: This intentionally does not return the list of files that appear in
+ such placeholders. An action that uses file-args *must* know the paths of
+ those files prior to the parsing of the arguments (typically by explicitly
+ listing them in the action's inputs in build files).
+ """
+ new_args = list(args)
+ file_jsons = dict()
+ r = re.compile(r'@FileArg\((.*?)\)')
+ for i, arg in enumerate(args):
+ match = r.search(arg)
+ if not match:
+ continue
+
+ if match.end() != len(arg):
+ raise Exception(
+ 'Unexpected characters after FileArg: {}'.format(arg))
+
+ lookup_path = match.group(1).split(':')
+ file_path = lookup_path[0]
+ if file_path not in file_jsons:
+ with open(file_path) as f:
+ file_jsons[file_path] = json.load(f)
+
+ expansion = file_jsons[file_path]
+ for k in lookup_path[1:]:
+ expansion = expansion[k]
+
+ # This should match parse_gn_list. The output is either a GN-formatted list
+ # or a literal (with no quotes).
+ if isinstance(expansion, list):
+ new_args[i] = arg[:match.start()] + gn_helpers.ToGNString(
+ expansion)
+ else:
+ new_args[i] = arg[:match.start()] + str(expansion)
+
+ return new_args
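+# Example (illustrative): with conf.json containing {"version": "1.0"},
+#   expand_file_args(['--ver=@FileArg(conf.json:version)'])
+# returns ['--ver=1.0'].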
+
+
+def read_sources_list(sources_list_file_name):
+ """Reads a GN-written file containing list of file names and returns a list.
+
+ Note that this function should not be used to parse response files.
+ """
+ with open(sources_list_file_name) as f:
+ return [file_name.strip() for file_name in f]
+
+
+def call_and_write_depfile_if_stale(function,
+ options,
+ record_path=None,
+ input_paths=None,
+ input_strings=None,
+ output_paths=None,
+ force=False,
+ pass_changes=False,
+ depfile_deps=None,
+ add_pydeps=True):
+ """Wraps md5_check.call_and_record_if_stale() and writes a depfile if applicable.
+
+ Depfiles are automatically added to output_paths when present in the
+ |options| argument. They are then created after |function| is called.
+
+ By default, only python dependencies are added to the depfile. If there are
+ other input paths that are not captured by GN deps, then they should be
+ listed in depfile_deps. It's important to write paths to the depfile that
+ are already captured by GN deps since GN args can cause GN deps to change,
+ and such changes are not immediately reflected in depfiles
+ (http://crbug.com/589311).
+ """
+ if not output_paths:
+ raise Exception('At least one output_path must be specified.')
+ input_paths = list(input_paths or [])
+ input_strings = list(input_strings or [])
+ output_paths = list(output_paths or [])
+
+ python_deps = None
+ if hasattr(options, 'depfile') and options.depfile:
+ python_deps = _compute_python_dependencies()
+ input_paths += python_deps
+ output_paths += [options.depfile]
+
+ def on_stale_md5(changes):
+ args = (changes, ) if pass_changes else ()
+ function(*args)
+ if python_deps is not None:
+ all_depfile_deps = list(python_deps) if add_pydeps else []
+ if depfile_deps:
+ all_depfile_deps.extend(depfile_deps)
+ write_depfile(options.depfile,
+ output_paths[0],
+ all_depfile_deps,
+ add_pydeps=False)
+
+ md5_check.call_and_record_if_stale(on_stale_md5,
+ record_path=record_path,
+ input_paths=input_paths,
+ input_strings=input_strings,
+ output_paths=output_paths,
+ force=force,
+ pass_changes=True)
+
+
+def get_all_files(base, follow_symlinks=False):
+    """Returns a list of all the files under |base|. Each entry includes
+    the |base| prefix."""
+ result = []
+ for root, _, files in os.walk(base, followlinks=follow_symlinks):
+ result.extend([os.path.join(root, f) for f in files])
+
+ return result
+
+
+def rebase_path(input_path, new_base=None, current_base="."):
+    if new_base:
+        return os.path.relpath(os.path.join(current_base, input_path),
+                               new_base)
+    else:
+        return os.path.realpath(os.path.join(current_base, input_path))
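+# Examples (illustrative):
+#   rebase_path('src/a.c', new_base='src')  ->  'a.c'
+#   rebase_path('a.c')                      ->  os.path.realpath('./a.c')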
diff --git a/dsoftbus/build/scripts/util/file_utils.py b/dsoftbus/build/scripts/util/file_utils.py
new file mode 100755
index 0000000000000000000000000000000000000000..aee1a58dafda222794343a205c67ceabc682ab41
--- /dev/null
+++ b/dsoftbus/build/scripts/util/file_utils.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import os
+import subprocess
+import hashlib
+
+
+# Read json file data
+def read_json_file(input_file):
+ if not os.path.exists(input_file):
+ print("file '{}' doesn't exist.".format(input_file))
+ return None
+
+ data = None
+ try:
+ with open(input_file, 'r') as input_f:
+ data = json.load(input_f)
+ except json.decoder.JSONDecodeError:
+ print("The file '{}' format is incorrect.".format(input_file))
+ raise
+    except Exception:
+ print("read file '{}' failed.".format(input_file))
+ raise
+ return data
+
+
+# Read file by line
+def read_file(input_file):
+ if not os.path.exists(input_file):
+ print("file '{}' doesn't exist.".format(input_file))
+ return None
+
+ data = []
+ try:
+ with open(input_file, 'r') as file_obj:
+ for line in file_obj.readlines():
+ data.append(line.rstrip('\n'))
+    except Exception:
+ print("read file '{}' failed".format(input_file))
+ raise
+ return data
+
+
+# Write json file data
+def write_json_file(output_file, content, check_changes=False):
+ file_dir = os.path.dirname(os.path.abspath(output_file))
+ if not os.path.exists(file_dir):
+ os.makedirs(file_dir, exist_ok=True)
+
+ if check_changes is True:
+ changed = __check_changes(output_file, content)
+ else:
+ changed = True
+ if changed is True:
+ with open(output_file, 'w') as output_f:
+ json.dump(content, output_f, sort_keys=True, indent=2)
+
+
+def __check_changes(output_file, content):
+ if os.path.exists(output_file) and os.path.isfile(output_file):
+        # sha256 digest of the existing file content
+ sha256_obj = hashlib.sha256()
+ sha256_obj.update(str(read_json_file(output_file)).encode())
+ hash_value = sha256_obj.hexdigest()
+        # sha256 digest of the new content
+ sha256_obj_new = hashlib.sha256()
+ sha256_obj_new.update(str(content).encode())
+ hash_value_new = sha256_obj_new.hexdigest()
+ if hash_value_new == hash_value:
+ return False
+ return True
+
+
+# Write file data
+def write_file(output_file, content):
+ file_dir = os.path.dirname(os.path.abspath(output_file))
+ if not os.path.exists(file_dir):
+ os.makedirs(file_dir, exist_ok=True)
+
+ with open(output_file, 'w') as output_f:
+ output_f.write(content)
+ if output_file.endswith('.gni') or output_file.endswith('.gn'):
+ # Call gn format to make the output gn file prettier.
+ cmd = ['gn', 'format']
+ cmd.append(output_file)
+ subprocess.check_output(cmd)
diff --git a/dsoftbus/build/scripts/util/md5_check.py b/dsoftbus/build/scripts/util/md5_check.py
new file mode 100755
index 0000000000000000000000000000000000000000..8911a8da587b4ad51a3aa17f963bc9b0cf0cc16b
--- /dev/null
+++ b/dsoftbus/build/scripts/util/md5_check.py
@@ -0,0 +1,445 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import difflib
+import hashlib
+import itertools
+import json
+import os
+import zipfile
+from .pycache import pycache_enabled
+from .pycache import pycache
+
+# When set and a difference is detected, a diff of what changed is printed.
+PRINT_EXPLANATIONS = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
+
+# An escape hatch that causes all targets to be rebuilt.
+_FORCE_REBUILD = int(os.environ.get('FORCE_REBUILD', 0))
+
+
+def get_new_metadata(input_strings, input_paths):
+ new_metadata = _Metadata()
+ new_metadata.add_strings(input_strings)
+
+ for path in input_paths:
+ if _is_zip_file(path):
+ entries = _extract_zip_entries(path)
+ new_metadata.add_zip_file(path, entries)
+ else:
+ new_metadata.add_file(path, _md5_for_path(path))
+ return new_metadata
+
+
+def get_old_metadata(record_path):
+ old_metadata = None
+ if os.path.exists(record_path):
+ with open(record_path, 'r') as jsonfile:
+ try:
+ old_metadata = _Metadata.from_file(jsonfile)
+ except: # noqa: E722 pylint: disable=bare-except
+ pass
+ return old_metadata
+
+
+def print_explanations(record_path, changes):
+ if PRINT_EXPLANATIONS:
+ print('=' * 80)
+ print('Target is stale: %s' % record_path)
+ print(changes.describe_difference())
+ print('=' * 80)
+
+
+def call_and_record_if_stale(
+ function, # pylint: disable=invalid-name
+ record_path=None,
+ input_paths=None,
+ input_strings=None,
+ output_paths=None,
+ force=False,
+ pass_changes=False):
+ """Calls function if outputs are stale.
+
+ Outputs are considered stale if:
+ - any output_paths are missing, or
+ - the contents of any file within input_paths has changed, or
+ - the contents of input_strings has changed.
+
+    To debug which files are out-of-date, set the environment variable:
+    PRINT_BUILD_EXPLANATIONS=1
+
+ Args:
+ function: The function to call.
+ record_path: Path to record metadata.
+ Defaults to output_paths[0] + '.md5.stamp'
+ input_paths: List of paths to calculate a md5 sum on.
+ input_strings: List of strings to record verbatim.
+ output_paths: List of output paths.
+ force: Whether to treat outputs as missing regardless of whether they
+ actually are.
+ pass_changes: Whether to pass a Changes instance to |function|.
+ """
+ assert record_path or output_paths
+ input_paths = input_paths or []
+ input_strings = input_strings or []
+ output_paths = output_paths or []
+
+ new_metadata = get_new_metadata(input_strings, input_paths)
+ force = force or _FORCE_REBUILD
+ missing_outputs = [
+ x for x in output_paths if force or not os.path.exists(x)
+ ]
+
+ if pycache_enabled:
+ # Input strings, input files and outputs names together compose
+ # cache manifest, which is the only identifier of a python action.
+ manifest = '-'.join(
+ [new_metadata.strings_md5(),
+ new_metadata.files_md5()] + sorted(output_paths))
+ record_path = pycache.get_manifest_path('{}.manifest'.format(manifest))
+ old_metadata = get_old_metadata(record_path)
+ else:
+ record_path = record_path or output_paths[0] + '.md5.stamp'
+ # When outputs are missing, don't bother gathering change information.
+ if not missing_outputs:
+ old_metadata = get_old_metadata(record_path)
+ else:
+ old_metadata = None
+
+ changes = Changes(old_metadata, new_metadata, force, missing_outputs)
+ if not changes.has_changes():
+ if not pycache_enabled:
+ return
+ if pycache_enabled and pycache.retrieve(output_paths, prefix=manifest):
+ return
+
+ print_explanations(record_path, changes)
+
+ args = (changes, ) if pass_changes else ()
+ function(*args)
+ if pycache_enabled:
+ try:
+ pycache.report_cache_stat('cache_miss')
+ except: # noqa: E722 pylint: disable=bare-except
+ pass
+ pycache.save(output_paths, prefix=manifest)
+
+ with open(record_path, 'w') as record:
+ new_metadata.to_file(record)
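+# Example (illustrative), where 'do_build' is a hypothetical callable:
+#   call_and_record_if_stale(do_build, input_paths=['a.c'],
+#                            output_paths=['out/a.o'])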
+
+
+class Changes(object):
+    """Provides an API for querying what changed between runs."""
+ def __init__(self, old_metadata, new_metadata, force, missing_outputs):
+ self.old_metadata = old_metadata
+ self.new_metadata = new_metadata
+ self.force = force
+ self.missing_outputs = missing_outputs
+
+ def _get_old_tag(self, path, subpath=None):
+ return self.old_metadata and self.old_metadata.get_tag(path, subpath)
+
+ def has_changes(self):
+ """Returns whether any changes exist."""
+ return (
+ self.force or not self.old_metadata or
+ self.old_metadata.strings_md5() != self.new_metadata.strings_md5()
+ or self.old_metadata.files_md5() != self.new_metadata.files_md5())
+
+ def added_or_modified_only(self):
+ """Returns whether the only changes were from added or modified (sub)files.
+
+ No missing outputs, no removed paths/subpaths.
+ """
+ if (self.force or not self.old_metadata
+ or self.old_metadata.strings_md5() !=
+ self.new_metadata.strings_md5()):
+ return False
+ if any(self.iter_removed_paths()):
+ return False
+ for path in self.iter_modified_paths():
+ if any(self.iter_removed_subpaths(path)):
+ return False
+ return True
+
+ def iter_all_paths(self):
+ """Generator for paths."""
+ return self.new_metadata.iter_paths()
+
+ def iter_all_subpaths(self, path):
+ """Generator for subpaths."""
+ return self.new_metadata.iter_subpaths(path)
+
+ def iter_added_paths(self):
+ """Generator for paths that were added."""
+ for path in self.new_metadata.iter_paths():
+ if self._get_old_tag(path) is None:
+ yield path
+
+ def iter_added_subpaths(self, path):
+ """Generator for paths that were added within the given zip file."""
+ for subpath in self.new_metadata.iter_subpaths(path):
+ if self._get_old_tag(path, subpath) is None:
+ yield subpath
+
+ def iter_removed_paths(self):
+ """Generator for paths that were removed."""
+ if self.old_metadata:
+ for path in self.old_metadata.iter_paths():
+ if self.new_metadata.get_tag(path) is None:
+ yield path
+
+ def iter_removed_subpaths(self, path):
+ """Generator for paths that were removed within the given zip file."""
+ if self.old_metadata:
+ for subpath in self.old_metadata.iter_subpaths(path):
+ if self.new_metadata.get_tag(path, subpath) is None:
+ yield subpath
+
+ def iter_modified_paths(self):
+ """Generator for paths whose contents have changed."""
+ for path in self.new_metadata.iter_paths():
+ old_tag = self._get_old_tag(path)
+ new_tag = self.new_metadata.get_tag(path)
+ if old_tag is not None and old_tag != new_tag:
+ yield path
+
+ def iter_modified_subpaths(self, path):
+ """Generator for paths within a zip file whose contents have changed."""
+ for subpath in self.new_metadata.iter_subpaths(path):
+ old_tag = self._get_old_tag(path, subpath)
+ new_tag = self.new_metadata.get_tag(path, subpath)
+ if old_tag is not None and old_tag != new_tag:
+ yield subpath
+
+ def iter_changed_paths(self):
+ """Generator for all changed paths (added/removed/modified)."""
+ return itertools.chain(self.iter_removed_paths(),
+ self.iter_modified_paths(),
+ self.iter_added_paths())
+
+ def iter_changed_subpaths(self, path):
+ """Generator for paths within a zip that were added/removed/modified."""
+ return itertools.chain(self.iter_removed_subpaths(path),
+ self.iter_modified_subpaths(path),
+ self.iter_added_subpaths(path))
+
+ def describe_difference(self):
+ """Returns a human-readable description of what changed."""
+ if self.force:
+ return 'force=True'
+ elif self.old_metadata is None:
+ return 'Previous stamp file not found.'
+
+ if self.old_metadata.strings_md5() != self.new_metadata.strings_md5():
+ ndiff = difflib.ndiff(self.old_metadata.get_strings(),
+ self.new_metadata.get_strings())
+ changed = [s for s in ndiff if not s.startswith(' ')]
+ return 'Input strings changed:\n ' + '\n '.join(changed)
+
+ if self.old_metadata.files_md5() == self.new_metadata.files_md5():
+ return "There's no difference."
+
+        lines = []
+        lines.extend('Added: {}'.format(p) for p in self.iter_added_paths())
+        lines.extend('Removed: {}'.format(p)
+                     for p in self.iter_removed_paths())
+        for path in self.iter_modified_paths():
+            lines.append('Modified: {}'.format(path))
+            lines.extend('  -> Subpath added: {}'.format(p)
+                         for p in self.iter_added_subpaths(path))
+            lines.extend('  -> Subpath removed: {}'.format(p)
+                         for p in self.iter_removed_subpaths(path))
+            lines.extend('  -> Subpath modified: {}'.format(p)
+                         for p in self.iter_modified_subpaths(path))
+ if lines:
+ return 'Input files changed:\n {}'.format('\n '.join(lines))
+
+ if self.missing_outputs:
+ return 'Outputs do not exist:\n {}'.format('\n '.join(
+ self.missing_outputs))
+
+ return 'I have no idea what changed (there is a bug).'
+
+
+class _Metadata(object):
+ """Data model for tracking change metadata."""
+ def __init__(self):
+ self._files_md5 = None
+ self._strings_md5 = None
+ self._files = []
+ self._strings = []
+ # Map of (path, subpath) -> entry. Created upon first call to _get_entry().
+ self._file_map = None
+
+ @classmethod
+ def from_file(cls, fileobj):
+ """Returns a _Metadata initialized from a file object."""
+ ret = cls()
+ obj = json.load(fileobj)
+ ret._files_md5 = obj['files-md5']
+ ret._strings_md5 = obj['strings-md5']
+ ret._files = obj['input-files']
+ ret._strings = obj['input-strings']
+ return ret
+
+ def to_file(self, fileobj):
+ """Serializes metadata to the given file object."""
+ obj = {
+ "files-md5": self.files_md5(),
+ "strings-md5": self.strings_md5(),
+ "input-files": self._files,
+ "input-strings": self._strings,
+ }
+ json.dump(obj, fileobj, indent=2, sort_keys=True)
+
+ def _assert_not_queried(self):
+ assert self._files_md5 is None
+ assert self._strings_md5 is None
+ assert self._file_map is None
+
+ def add_strings(self, values):
+ self._assert_not_queried()
+ self._strings.extend(str(v) for v in values)
+
+ def add_file(self, path, tag):
+ """Adds metadata for a non-zip file.
+
+ Args:
+ path: Path to the file.
+ tag: A short string representative of the file contents.
+ """
+ self._assert_not_queried()
+ self._files.append({
+ 'path': path,
+ 'tag': tag,
+ })
+
+ def add_zip_file(self, path, entries):
+ """Adds metadata for a zip file.
+
+ Args:
+ path: Path to the file.
+ entries: List of (subpath, tag) tuples for entries within the zip.
+ """
+ self._assert_not_queried()
+ tag = _compute_inline_md5(
+ itertools.chain((e[0] for e in entries), (e[1] for e in entries)))
+ self._files.append({
+ 'path':
+ path,
+ 'tag':
+ tag,
+ 'entries': [{
+ "path": e[0],
+ "tag": e[1]
+ } for e in entries],
+ })
+
+ def get_strings(self):
+ """Returns the list of input strings."""
+ return self._strings
+
+ def files_md5(self):
+ """Lazily computes and returns the aggregate md5 of input files."""
+ if self._files_md5 is None:
+ # Omit paths from md5 since temporary files have random names.
+ self._files_md5 = _compute_inline_md5(
+ self.get_tag(p) for p in sorted(self.iter_paths()))
+ return self._files_md5
+
+ def strings_md5(self):
+ """Lazily computes and returns the aggregate md5 of input strings."""
+ if self._strings_md5 is None:
+ self._strings_md5 = _compute_inline_md5(self._strings)
+ return self._strings_md5
+
+ def _get_entry(self, path, subpath=None):
+ """Returns the JSON entry for the given path / subpath."""
+ if self._file_map is None:
+ self._file_map = {}
+ for entry in self._files:
+ self._file_map[(entry['path'], None)] = entry
+ for subentry in entry.get('entries', ()):
+ self._file_map[(entry['path'],
+ subentry['path'])] = subentry
+ return self._file_map.get((path, subpath))
+
+ def get_tag(self, path, subpath=None):
+ """Returns the tag for the given path / subpath."""
+ ret = self._get_entry(path, subpath)
+ return ret and ret['tag']
+
+ def iter_paths(self):
+ """Returns a generator for all top-level paths."""
+ return (e['path'] for e in self._files)
+
+ def iter_subpaths(self, path):
+ """Returns a generator for all subpaths in the given zip.
+
+ If the given path is not a zip file or doesn't exist, returns an empty
+ iterable.
+ """
+ outer_entry = self._get_entry(path)
+ if not outer_entry:
+ return ()
+ subentries = outer_entry.get('entries', [])
+ return (entry['path'] for entry in subentries)
+
+
+def _update_md5_for_file(md5, path, block_size=2**16):
+    # For a dangling symlink, record the md5 of the link target path.
+ if os.path.islink(path):
+ linkto = os.readlink(path)
+ if not os.path.exists(linkto):
+ md5.update(linkto.encode())
+ return
+
+ with open(path, 'rb') as infile:
+ while True:
+ data = infile.read(block_size)
+ if not data:
+ break
+ md5.update(data)
+
+
+def _update_md5_for_directory(md5, dir_path):
+ for root, _, files in os.walk(dir_path):
+ for f in files:
+ _update_md5_for_file(md5, os.path.join(root, f))
+
+
+def _md5_for_path(path):
+ md5 = hashlib.md5()
+ if os.path.isdir(path):
+ _update_md5_for_directory(md5, path)
+ else:
+ _update_md5_for_file(md5, path)
+ return md5.hexdigest()
+
+
+def _compute_inline_md5(iterable):
+ """Computes the md5 of the concatenated parameters."""
+ md5 = hashlib.md5()
+ for item in iterable:
+ md5.update(str(item).encode())
+ return md5.hexdigest()
+
+
+def _is_zip_file(path):
+ """Returns whether to treat the given file as a zip file."""
+    return path.endswith('.zip')
+
+
+def _extract_zip_entries(path):
+ """Returns a list of (path, CRC32) of all files within |path|."""
+ entries = []
+ with zipfile.ZipFile(path) as zip_file:
+ for zip_info in zip_file.infolist():
+ # Skip directories and empty files.
+ if zip_info.CRC:
+ entries.append(
+ (zip_info.filename, zip_info.CRC + zip_info.compress_type))
+ return entries
diff --git a/dsoftbus/build/scripts/util/pycache.py b/dsoftbus/build/scripts/util/pycache.py
new file mode 100755
index 0000000000000000000000000000000000000000..11dacbde40bf6d6f655706aade0945ebea2bff12
--- /dev/null
+++ b/dsoftbus/build/scripts/util/pycache.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import shutil
+import os
+import hashlib
+import json
+import http.client as client
+from . import build_utils
+
+
+class Storage():
+ def __init__(self):
+ pass
+
+ @classmethod
+ def retrieve_object(cls, cache_artifact, obj):
+ possible_dir_cache_artifact = '{}.directory'.format(cache_artifact)
+
+ if os.path.exists(cache_artifact):
+ os.makedirs(os.path.dirname(obj), exist_ok=True)
+ shutil.copyfile(cache_artifact, obj)
+ os.utime(cache_artifact)
+ if pycache_debug_enable:
+ print('Retrieve {} from cache'.format(obj))
+ elif os.path.exists(possible_dir_cache_artifact):
+ # Extract zip archive if it's cache artifact for directory.
+ os.makedirs(obj, exist_ok=True)
+ build_utils.extract_all(possible_dir_cache_artifact,
+ obj,
+ no_clobber=False)
+ os.utime(possible_dir_cache_artifact)
+ if pycache_debug_enable:
+ print('Extract {} from cache'.format(obj))
+ else:
+ if pycache_debug_enable:
+ print('Failed to retrieve {} from cache'.format(obj))
+ return 0
+ return 1
+
+ @classmethod
+ def add_object(cls, cache_artifact, obj):
+ cache_dir = os.path.dirname(cache_artifact)
+ os.makedirs(cache_dir, exist_ok=True)
+
+ if not os.path.exists(obj):
+ return
+        # If the path is a directory, store a zip archive.
+ if os.path.isdir(obj):
+ dir_cache_artifact = '{}.directory'.format(cache_artifact)
+ build_utils.zip_dir(dir_cache_artifact, obj)
+ if pycache_debug_enable:
+ print("archive {} to {}".format(obj, dir_cache_artifact))
+ else:
+ shutil.copyfile(obj, cache_artifact)
+ if pycache_debug_enable:
+ print("copying {} to {}".format(obj, cache_artifact))
+
+
+class PyCache():
+    def __init__(self, cache_dir=None):
+        cache_dir = cache_dir or os.environ.get('PYCACHE_DIR')
+ if cache_dir:
+ self.pycache_dir = cache_dir
+ else:
+ raise Exception('Error: failed to get PYCACHE_DIR')
+ self.storage = Storage()
+
+ def retrieve(self, output_paths, prefix=''):
+ for path in output_paths:
+ _, cache_artifact = self.descend_directory('{}{}'.format(
+ prefix, path))
+ result = self.storage.retrieve_object(cache_artifact, path)
+ if not result:
+ return result
+
+ try:
+ self.report_cache_stat('cache_hit')
+ except: # noqa: E722 pylint: disable=bare-except
+ pass
+ return 1
+
+ def save(self, output_paths, prefix=''):
+ for path in output_paths:
+ _, cache_artifact = self.descend_directory('{}{}'.format(
+ prefix, path))
+ self.storage.add_object(cache_artifact, path)
+
+ def report_cache_stat(self, hit_or_miss):
+ pyd_server, pyd_port = self.get_pyd()
+ conn = client.HTTPConnection(pyd_server, pyd_port)
+ conn.request(hit_or_miss, '/')
+ conn.close()
+
+ def get_pyd(self):
+ daemon_config_file = '{}/.config'.format(self.pycache_dir)
+ if not os.path.exists(daemon_config_file):
+ raise Exception('Warning: no pycache daemon process exists.')
+ with open(daemon_config_file, 'r') as jsonfile:
+ data = json.load(jsonfile)
+ return data.get('host'), data.get('port')
+
+ @classmethod
+ def cache_key(cls, path):
+ sha256 = hashlib.sha256()
+ sha256.update(path.encode())
+ return sha256.hexdigest()
+
+ def descend_directory(self, path):
+ digest = self.cache_key(path)
+ cache_dir = os.path.join(self.pycache_dir, digest[:2])
+ return cache_dir, os.path.join(cache_dir, digest[2:])
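+    # Example (illustrative): a path whose sha256 digest starts with 'ab12'
+    # is stored as <pycache_dir>/ab/12... (a git-style fan-out layout).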
+
+ # Manifest file to record inputs/outputs/commands.
+ def get_manifest_path(self, path):
+ manifest_dir, manifest_file = self.descend_directory(path)
+ os.makedirs(manifest_dir, exist_ok=True)
+ return manifest_file
+
+
+pycache_enabled = (os.environ.get('PYCACHE_DIR') is not None)
+pycache_debug_enable = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
+if pycache_enabled:
+ pycache = PyCache()
+else:
+ pycache = None # pylint: disable=invalid-name
diff --git a/dsoftbus/build/scripts/util/pyd.py b/dsoftbus/build/scripts/util/pyd.py
new file mode 100755
index 0000000000000000000000000000000000000000..78d0f42d51c216356d9b613a3cf9ba39f468049b
--- /dev/null
+++ b/dsoftbus/build/scripts/util/pyd.py
@@ -0,0 +1,219 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import argparse
+import errno
+import json
+import datetime
+import http.client as client
+
+from http.server import BaseHTTPRequestHandler
+from http.server import HTTPServer
+
+PYCACHE_PORT = 7970  # from the hex ASCII codes of 'yp' (0x79, 0x70)
+LOCALHOST = '127.0.0.1'
+DEBUG = int(os.environ.get('PRINT_BUILD_EXPLANATIONS', 0))
+
+
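+# The daemon reuses the HTTP request method as an RPC verb:
+# BaseHTTPRequestHandler dispatches a request with method NAME to a
+# do_NAME() handler, so conn.request('cache_hit', '/') lands in
+# do_cache_hit() below.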
+class PycacheDaemonRequestHandler(BaseHTTPRequestHandler):
+ # Suppress logs
+ def log_message(self, format, *args): # pylint: disable=redefined-builtin
+ if DEBUG:
+ super().log_message(format, *args)
+ else:
+ pass
+
+ def do_cache_hit(self):
+ self.server.hit_times += 1
+ self.send_response(200)
+
+ def do_cache_miss(self):
+ self.server.miss_times += 1
+ self.send_response(200)
+
+ def do_cache_manage(self):
+ self.send_response(200)
+ self.server.cache_manage()
+
+ def do_show_statistics(self):
+ self.send_response(200)
+ self.server.show_statistics()
+
+ def do_stop_service(self):
+ self.send_response(200)
+ self.server.stop_service = True
+
+
+class PycacheDaemon(HTTPServer):
+ def __init__(self, *args, **kargs):
+ self.hit_times = 0
+ self.miss_times = 0
+ self.stop_service = False
+ self.pycache_dir = None
+ self.pycache_config_file = None
+ super().__init__(*args, **kargs)
+
+ def serve_forever(self, poll_interval=0.5):
+ while not self.stop_service:
+ self.handle_request()
+ os.unlink(self.pycache_config_file)
+
+ def record_pycache_config(self, pycache_dir):
+ root = os.path.realpath(pycache_dir)
+ self.pycache_dir = root
+ self.pycache_config_file = os.path.join(root, '.config')
+ os.makedirs(root, exist_ok=True)
+ host, port = self.server_address[:2]
+ config = {
+ 'root': root,
+ 'config_file': self.pycache_config_file,
+ 'debug': bool(DEBUG),
+ 'host': host,
+ 'port': port,
+ }
+ with open(self.pycache_config_file, 'w') as jsonfile:
+ json.dump(config, jsonfile, indent=2, sort_keys=True)
+
+ def cache_manage(self):
+ now = datetime.datetime.now()
+ days = 15
+ earlier_time = (now - datetime.timedelta(days)).timestamp()
+        # The pycache pool holds at most 40GB of cache objects, each kept
+        # for at most 15 days; while usage stays above the limit, the
+        # retention window shrinks one day at a time.
+ while days > 0:
+ disk_usage = 0
+ for root, _, files in os.walk(self.pycache_dir):
+ for file in files:
+ path = os.path.join(root, file)
+ stat = os.stat(path)
+ if stat.st_atime < int(earlier_time):
+ os.unlink(path)
+ else:
+ disk_usage += stat.st_size
+ if disk_usage >= 40 * 1024 * 1024 * 1024:
+ days -= 1
+ earlier_time = (now - datetime.timedelta(days)).timestamp()
+ else:
+ break
+
+ def show_statistics(self):
+ actions = self.hit_times + self.miss_times
+ if actions != 0:
+ print('-' * 80)
+ print('pycache statistics:')
+ print('pycache hit targets: {}'.format(self.hit_times))
+ print('pycache miss targets: {}'.format(self.miss_times))
+ hit_rate = float(self.hit_times) / actions * 100
+ miss_rate = float(self.miss_times) / actions * 100
+ print('pycache hit rate: {:.2f}%'.format(hit_rate))
+ print('pycache miss rate: {:.2f}%'.format(miss_rate))
+ print('-' * 80)
+ else:
+ print('-' * 80)
+ print('No pycache actions in pycache, skip statistics')
+ print('-' * 80)
+
+
+def start_server(host, port, root):
+ if root is None:
+ print('Warning: missing pycache root directory')
+ return
+ server_address = (host, port)
+ try:
+ pyd = PycacheDaemon(server_address, PycacheDaemonRequestHandler)
+ print('Starting pycache daemon at {}:{}'.format(host, port))
+ pyd.record_pycache_config(root)
+ pyd.serve_forever()
+ except OSError as err:
+ if err.errno == errno.EADDRINUSE:
+ start_server(host, port + 2, root)
+ else:
+ print('Warning: Failed to start pycache daemon process')
+
+
+def get_pyd():
+ cache_dir = os.environ.get('PYCACHE_DIR')
+ daemon_config_file = '{}/.config'.format(cache_dir)
+ if not os.path.exists(daemon_config_file):
+        raise Exception(
+            'Warning: {} does not exist'.format(daemon_config_file))
+ with open(daemon_config_file, 'r') as jsonfile:
+ data = json.load(jsonfile)
+ return data.get('host'), data.get('port')
+
+
+def stop_server():
+ try:
+ host, port = get_pyd()
+ conn = client.HTTPConnection(host, port)
+ conn.request('stop_service', '/')
+ conn.close()
+ except: # noqa: E722 pylint: disable=bare-except
+ pass
+
+
+def show_statistics():
+ try:
+ host, port = get_pyd()
+ conn = client.HTTPConnection(host, port)
+ conn.request('show_statistics', '/')
+ conn.close()
+ except: # noqa: E722 pylint: disable=bare-except
+ pass
+
+
+def manage_cache_contents():
+ try:
+ host, port = get_pyd()
+ conn = client.HTTPConnection(host, port)
+ conn.request('cache_manage', '/')
+ conn.close()
+ except: # noqa: E722 pylint: disable=bare-except
+ pass
+
+
+def main(args):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--root', help='path to pycache root directory')
+ parser.add_argument('--port',
+ default=PYCACHE_PORT,
+ help='which port to listen')
+ parser.add_argument('--start',
+ action='store_true',
+ help='start daemon process for pycache')
+ parser.add_argument('--stop',
+ action='store_true',
+ help='stop pycache daemon process')
+ parser.add_argument('--stat',
+ action='store_true',
+ help='report cache statistics')
+ parser.add_argument('--manage',
+ action='store_true',
+ help='manage pycache contents')
+
+ options = parser.parse_args(args)
+ if options.start:
+ start_server(LOCALHOST, int(options.port), options.root)
+ if options.stop:
+ stop_server()
+ if options.stat:
+ show_statistics()
+ if options.manage:
+ manage_cache_contents()
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/dsoftbus/build/scripts/util/zip_and_md5.py b/dsoftbus/build/scripts/util/zip_and_md5.py
new file mode 100755
index 0000000000000000000000000000000000000000..7803f183f67a89b782ad8c31aaa98c3ff7a8032e
--- /dev/null
+++ b/dsoftbus/build/scripts/util/zip_and_md5.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+import hashlib
+sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
+import file_utils # noqa: E402
+from scripts.util import build_utils # noqa: E402
+
+__MAX_BUF = 1024 * 1024
+
+
+def _gen_signature(input_file):
+    if not os.path.isfile(input_file):
+        raise Exception("file '{}' does not exist.".format(input_file))
+ hash_value = ''
+ sha256obj = hashlib.sha256()
+ try:
+ with open(input_file, 'rb') as file_obj:
+ while True:
+ buf = file_obj.read(__MAX_BUF)
+ if not buf:
+ break
+ sha256obj.update(buf)
+ hash_value = sha256obj.hexdigest()
+ except OSError as err:
+ sys.stdout.write("read file failed. {}".format(err))
+ return hash_value
+
+
+def _write_signature_file(signature_file, hash_value):
+ if os.path.exists(signature_file):
+ os.remove(signature_file)
+ file_utils.write_file(signature_file, hash_value)
+
+
+def _update_signature(signature_file, new_hash_value):
+ if os.path.exists(signature_file):
+ data = file_utils.read_file(signature_file)
+ if data is None:
+ raise Exception(
+                "read signature file '{}' failed.".format(signature_file))
+ old_value = data[0]
+ if old_value is None or old_value == '':
+ raise Exception(
+                "signature file '{}' content error.".format(signature_file))
+ if old_value == new_hash_value:
+ return
+ _write_signature_file(signature_file, new_hash_value)
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--input-dir', required=True)
+ parser.add_argument('--output-zipfile', required=True)
+ parser.add_argument('--signature-file', required=True)
+ args = parser.parse_args()
+
+ if os.path.exists(args.output_zipfile):
+ os.remove(args.output_zipfile)
+ build_utils.zip_dir(args.output_zipfile, args.input_dir)
+ if not os.path.exists(args.output_zipfile):
+ raise Exception("generate zipfile '{}' failed.".format(
+ args.output_zipfile))
+
+ hash_value = _gen_signature(args.output_zipfile)
+ _update_signature(args.signature_file, hash_value)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/scripts/util/zip_and_md5.pydeps b/dsoftbus/build/scripts/util/zip_and_md5.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..9e09e06a5666e9882b20fe40078d1e9038d940ce
--- /dev/null
+++ b/dsoftbus/build/scripts/util/zip_and_md5.pydeps
@@ -0,0 +1,10 @@
+# Generated by running:
+# build/print_python_deps.py --root build/scripts/util --output build/scripts/util/zip_and_md5.pydeps build/scripts/util/zip_and_md5.py
+../../gn_helpers.py
+../__init__.py
+__init__.py
+build_utils.py
+file_utils.py
+md5_check.py
+pycache.py
+zip_and_md5.py
diff --git a/dsoftbus/build/subsystem_config.json b/dsoftbus/build/subsystem_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f04f29ea8cac1f3918a96eb5ea5c3d4099e1c5a0
--- /dev/null
+++ b/dsoftbus/build/subsystem_config.json
@@ -0,0 +1,182 @@
+{
+ "ace": {
+ "project": "hmf/ace",
+ "path": "foundation/ace",
+ "name": "ace",
+ "dir": "foundation"
+ },
+ "account": {
+ "project": "hmf/account",
+ "path": "base/account",
+ "name": "account",
+ "dir": "base"
+ },
+ "distributeddatamgr": {
+ "project": "hmf/distributeddatamgr",
+ "path": "foundation/distributeddatamgr",
+ "name": "distributeddatamgr",
+ "dir": "foundation"
+ },
+ "security": {
+ "project": "hmf/security",
+ "path": "base/security",
+ "name": "security",
+ "dir": "base"
+ },
+ "startup": {
+ "project": "hmf/startup",
+ "path": "base/startup",
+ "name": "startup",
+ "dir": "base"
+ },
+ "hiviewdfx": {
+ "project": "hmf/hiviewdfx",
+ "path": "base/hiviewdfx",
+ "name": "hiviewdfx",
+ "dir": "base"
+ },
+ "utils": {
+ "project": "hmf/utils",
+ "path": "utils",
+ "name": "utils",
+ "dir": "utils"
+ },
+ "appexecfwk": {
+ "project": "hmf/appexecfwk",
+ "path": "foundation/appexecfwk",
+ "name": "appexecfwk",
+ "dir": "foundation"
+ },
+ "aafwk": {
+ "project": "hmf/aafwk",
+ "path": "foundation/aafwk",
+ "name": "aafwk",
+ "dir": "foundation"
+ },
+ "notification": {
+ "project": "hmf/notification",
+ "path": "base/notification",
+ "name": "notification",
+ "dir": "base"
+ },
+ "communication": {
+ "project": "hmf/communication",
+ "path": "foundation/communication",
+ "name": "communication",
+ "dir": "foundation"
+ },
+ "distributedschedule": {
+ "project": "hmf/distributedschedule",
+ "path": "foundation/distributedschedule",
+ "name": "distributedschedule",
+ "dir": "foundation"
+ },
+ "hdf": {
+ "project": "hmf/drivers/adapter_uhdf2",
+ "path": "drivers/adapter/uhdf2",
+ "name": "hdf",
+ "dir": "uhdf2"
+ },
+ "updater": {
+ "project": "hmf/updater",
+ "path": "base/update/updater",
+ "name": "updater",
+ "dir": "base/update"
+ },
+ "developtools": {
+ "project": "hmf/developtools",
+ "path": "developtools",
+ "name": "developtools",
+ "dir": "developtools"
+ },
+ "sensors": {
+ "project": "hmf/sensors",
+ "path": "base/sensors",
+ "name": "sensors",
+ "dir": "base"
+ },
+ "graphic": {
+ "project": "hmf/graphic",
+ "path": "foundation/graphic",
+ "name": "graphic",
+ "dir": "foundation"
+ },
+ "miscservices": {
+ "project": "hmf/miscservices",
+ "path": "base/miscservices",
+ "name": "miscservices",
+ "dir": "base"
+ },
+ "multimedia": {
+ "project": "hmf/multimedia",
+ "path": "foundation/multimedia",
+ "name": "multimedia",
+ "dir": "foundation"
+ },
+ "multimodalinput": {
+ "project": "hmf/multimodalinput",
+ "path": "foundation/multimodalinput",
+ "name": "multimodalinput",
+ "dir": "foundation"
+ },
+ "telephony": {
+ "project": "hmf/telephony",
+ "path": "base/telephony",
+ "name": "telephony",
+ "dir": "base"
+ },
+ "global": {
+ "project": "hmf/global",
+ "path": "base/global",
+ "name": "global",
+ "dir": "base"
+ },
+ "powermgr": {
+ "project": "hmf/powermgr",
+ "path": "base/powermgr",
+ "name": "powermgr",
+ "dir": "base"
+ },
+ "applications":{
+ "project": "hmf/applications/standard/hap",
+ "path": "applications/standard",
+ "name": "applications",
+ "dir": "applications"
+ },
+ "wpa_supplicant-2.9": {
+ "project": "hmf/wpa_supplicant-2.9",
+ "path": "third_party/wpa_supplicant/wpa_supplicant-2.9_standard",
+ "name": "wpa_supplicant-2.9",
+ "dir": "wpa_supplicant-2.9"
+ },
+ "xts": {
+ "project": "hmf/xts",
+ "path": "test/xts",
+ "name": "xts",
+ "dir": "test"
+ },
+ "distributedhardware": {
+ "project": "hmf/distributedhardware",
+ "path": "foundation/distributedhardware",
+ "name": "distributedhardware",
+ "dir": "foundation"
+ },
+ "ark": {
+ "project": "hmf/ark",
+ "path": "ark",
+ "name": "ark",
+ "dir": "ark"
+ },
+ "compileruntime": {
+ "project": "hmf/js_util_module",
+ "path": "base/compileruntime",
+ "name": "compileruntime",
+ "dir": "base"
+ },
+ "kernel": {
+ "project":"hmf/kernel",
+ "path": "kernel/linux/build",
+ "name": "kernel",
+ "dir": "kernel/linux"
+ }
+}
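Each entry in subsystem_config.json records a subsystem's manifest project, source path, and top-level directory. A small sketch (the config path below is an assumption about the checkout layout) of how a tool might load the file and group subsystems by base directory:

```python
import json
from collections import defaultdict

# Hypothetical location; adjust to the actual checkout layout.
CONFIG = 'dsoftbus/build/subsystem_config.json'

with open(CONFIG, encoding='utf-8') as f:
    subsystems = json.load(f)

by_dir = defaultdict(list)
for name, info in subsystems.items():
    # 'path' is the subsystem's source root relative to the repo root.
    by_dir[info['dir']].append((name, info['path']))

for base_dir, entries in sorted(by_dir.items()):
    print(base_dir)
    for name, path in sorted(entries):
        print('  {:<24} {}'.format(name, path))
```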
diff --git a/dsoftbus/build/subsystem_config_example.json b/dsoftbus/build/subsystem_config_example.json
new file mode 100644
index 0000000000000000000000000000000000000000..da20e8f1a7ebab6aed2193076c490892bd24cfda
--- /dev/null
+++ b/dsoftbus/build/subsystem_config_example.json
@@ -0,0 +1,8 @@
+{
+ "subsystem_examples": {
+ "project": "hmf/test/developertest",
+ "path": "test/developertest/examples",
+ "name": "subsystem_examples",
+ "dir": "test"
+ }
+}
diff --git a/dsoftbus/build/templates/common/copy.gni b/dsoftbus/build/templates/common/copy.gni
new file mode 100755
index 0000000000000000000000000000000000000000..7064ea565d61c1d3be85f7e40f291b02f35c5a12
--- /dev/null
+++ b/dsoftbus/build/templates/common/copy.gni
@@ -0,0 +1,158 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/ohos/notice/notice.gni")
+import("//build/templates/metadata/module_info.gni")
+
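+# Copies sources to outputs and, unless bypass_module_info_generation is set,
+# emits module install info and collects license notices. Hypothetical usage:
+#
+#   ohos_copy("my_config") {
+#     sources = [ "my.cfg" ]
+#     outputs = [ "${target_out_dir}/my.cfg" ]
+#     part_name = "my_part"
+#   }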
+template("ohos_copy") {
+ assert(defined(invoker.sources),
+ "sources must be defined for ${target_name}.")
+ assert(defined(invoker.outputs),
+ "outputs must be defined for ${target_name}.")
+
+ _is_test_target = defined(invoker.testonly) && invoker.testonly
+ _is_prebuilt = defined(invoker.prebuilt) && invoker.prebuilt
+ assert(_is_prebuilt != "") # Mark as used
+
+ # module_info generation is bypassed for prebuilt static library
+ _bypass_module_info_generation =
+ defined(invoker.bypass_module_info_generation) &&
+ invoker.bypass_module_info_generation
+ _main_target_name = target_name
+ _target_label =
+ get_label_info(":${_main_target_name}", "label_with_toolchain")
+ assert(_target_label != "") # Mark as used
+
+ if (defined(invoker.subsystem_name) && defined(invoker.part_name)) {
+ _subsystem_name = invoker.subsystem_name
+ _part_name = invoker.part_name
+ } else if (defined(invoker.part_name)) {
+ _part_name = invoker.part_name
+ _part_subsystem_info_file =
+ "$root_build_dir/build_configs/parts_info/part_subsystem.json"
+ _arguments = [
+ "--part-name",
+ _part_name,
+ "--part-subsystem-info-file",
+ rebase_path(_part_subsystem_info_file, root_build_dir),
+ ]
+ get_subsystem_script = "//build/templates/common/get_subsystem_name.py"
+ _subsystem_name =
+ exec_script(get_subsystem_script, _arguments, "trim string")
+ } else if (defined(invoker.subsystem_name)) {
+ _subsystem_name = invoker.subsystem_name
+ _part_name = _subsystem_name
+ } else {
+ _subsystem_name = "common"
+ _part_name = _subsystem_name
+ }
+ assert(_subsystem_name != "") # Mark as used
+ assert(_part_name != "") # Mark as used
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps += invoker.deps
+ }
+
+ if (!_is_test_target) {
+ _notice_target = "${_main_target_name}__notice"
+
+ # Prebuilt target names may contain "@" or "+", which GN cannot lex in a
+ # label, so replace them before deriving the notice target name.
+ _notice_target = string_replace(_notice_target, "@", "_")
+ _notice_target = string_replace(_notice_target, "+", "_")
+ collect_notice(_notice_target) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "license_as_sources",
+ "license_file",
+ ])
+ module_name = _main_target_name
+ module_source_dir = get_label_info(":${_main_target_name}", "dir")
+ }
+ _deps += [ ":$_notice_target" ]
+ }
+
+ copy(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ "public_configs",
+ "sources",
+ "outputs",
+ ])
+ deps = _deps
+
+ if (!_bypass_module_info_generation) {
+ _install_module_info = {
+ module_def = _target_label
+ module_info_file =
+ rebase_path(get_label_info(_target_label, "target_out_dir"),
+ root_build_dir) +
+ "/${_main_target_name}_module_info.json"
+ subsystem_name = _subsystem_name
+ part_name = _part_name
+ toolchain = current_toolchain
+ toolchain_out_dir = rebase_path(root_out_dir, root_build_dir)
+ }
+
+ metadata = {
+ install_modules = [ _install_module_info ]
+ }
+ }
+ }
+
+ if (!_bypass_module_info_generation) {
+ generate_module_info("${_main_target_name}_info") {
+ forward_variables_from(invoker,
+ [
+ "module_install_dir",
+ "relative_install_dir",
+ "module_source_dir",
+ "module_install_name",
+ "module_type",
+ "install_enable",
+ ])
+ module_name = _main_target_name
+ if (!defined(module_type)) {
+ module_type = "unknown"
+ }
+ if (!defined(module_source_dir)) {
+ module_source_dir = "${target_out_dir}"
+ }
+
+ if (_is_prebuilt) {
+ _sources = invoker.sources
+ module_source = _sources[0]
+ }
+ prebuilt = _is_prebuilt
+
+ if (!defined(install_enable)) {
+ install_enable = false
+ }
+
+ module_install_images = [ "system" ]
+ if (defined(invoker.install_images)) {
+ module_install_images = []
+ module_install_images += invoker.install_images
+ }
+
+ if (defined(invoker.symlink_target_name)) {
+ symlink_target_name = invoker.symlink_target_name
+ }
+
+ notice = "$target_out_dir/$_main_target_name.notice.txt"
+ }
+ }
+}
diff --git a/dsoftbus/build/templates/common/external_deps_handler.py b/dsoftbus/build/templates/common/external_deps_handler.py
new file mode 100755
index 0000000000000000000000000000000000000000..f30026870fda104210a3bd17242eea6db7108115
--- /dev/null
+++ b/dsoftbus/build/templates/common/external_deps_handler.py
@@ -0,0 +1,239 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import os
+import sys
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import read_json_file, write_json_file # noqa: E402
+
+
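+# Select the toolchain for an external dependency: use the current variant's
+# toolchain when the dependent part builds for that variant, otherwise fall
+# back to the default 'phone' toolchain and report that the caller must add
+# the dep's include dirs explicitly.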
+def get_toolchain(current_variant, external_part_variants, platform_toolchain):
+ if current_variant == 'phone':
+ toolchain = platform_toolchain.get(current_variant)
+ required_include_dir = False
+ else:
+ if current_variant in external_part_variants:
+ toolchain = platform_toolchain.get(current_variant)
+ required_include_dir = False
+ else:
+ toolchain = platform_toolchain.get('phone')
+ required_include_dir = True
+ return toolchain, required_include_dir
+
+
+def _get_external_module_info(parts_inner_kits_info, external_part_name,
+ external_module_name, adapted_part_name):
+ _inner_kits_info_dict = parts_inner_kits_info.get(external_part_name)
+ if _inner_kits_info_dict is None:
+ raise Exception(
+ "external dep part '{}' doesn't exist.".format(external_part_name))
+ if external_module_name in _inner_kits_info_dict:
+ external_module_desc_info = _inner_kits_info_dict.get(
+ external_module_name)
+ elif adapted_part_name:
+ _new_kits_info_dict = parts_inner_kits_info.get(adapted_part_name)
+ if _new_kits_info_dict is None:
+ raise Exception(
+ "part '{}' doesn't exist.".format(adapted_part_name))
+ external_module_desc_info = _new_kits_info_dict.get(
+ external_module_name)
+ if external_module_desc_info is None:
+ raise Exception(
+ "external dep module '{}' doesn't exist in part '{}'.".format(
+ external_module_name, adapted_part_name))
+ else:
+ raise Exception(
+ "external dep module '{}' doesn't exist in part '{}'.".format(
+ external_module_name, external_part_name))
+ return external_module_desc_info
+
+
+def _get_external_module_from_sdk(sdk_base_dir, external_part_name,
+ external_module_name, adapted_part_name):
+ _sdk_info_file = os.path.join(sdk_base_dir, external_part_name,
+ "sdk_info.json")
+ subsystem_sdk_info = read_json_file(_sdk_info_file)
+ if subsystem_sdk_info is None:
+ raise Exception("part '{}' doesn't exist in sdk modules.".format(
+ external_part_name))
+
+ _adapted = False
+ if external_module_name in subsystem_sdk_info:
+ sdk_module_info = subsystem_sdk_info.get(external_module_name)
+ elif adapted_part_name:
+ _new_sdk_info_file = os.path.join(sdk_base_dir, adapted_part_name,
+ "sdk_info.json")
+ _new_subsystem_sdk_info = read_json_file(_new_sdk_info_file)
+ if _new_subsystem_sdk_info is None:
+ raise Exception("part '{}' doesn't exist sdk modules.".format(
+ adapted_part_name))
+ sdk_module_info = _new_subsystem_sdk_info.get(external_module_name)
+ if sdk_module_info is None:
+ raise Exception(
+ "external dep module '{}' doesn't exist in part '{}'.".format(
+ external_module_name, adapted_part_name))
+ _adapted = True
+ else:
+ raise Exception(
+ "external dep module '{}' doesn't exist in part '{}'.".format(
+ external_module_name, external_part_name))
+ return sdk_module_info, _adapted
+
+
+def _get_inner_kits_adapter_info(innerkits_adapter_info_file):
+ _parts_compatibility = {}
+ if os.path.exists(innerkits_adapter_info_file):
+ inner_kits_adapter_info = read_json_file(innerkits_adapter_info_file)
+ if inner_kits_adapter_info is None:
+ raise Exception("read inner_kits_adapter info failed.")
+ _parts_compatibility.update(inner_kits_adapter_info)
+ return _parts_compatibility
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--external-deps', nargs='*', required=True)
+ parser.add_argument('--parts-src-flag-file', required=True)
+ parser.add_argument('--sdk-base-dir', required=True)
+ parser.add_argument('--sdk-dir-name', required=True)
+ parser.add_argument('--external-deps-temp-file', required=True)
+ parser.add_argument('--use-sdk', dest='use_sdk', action='store_true')
+ parser.set_defaults(use_sdk=False)
+ parser.add_argument('--current-toolchain', required=False, default='')
+ parser.add_argument(
+ '--innerkits-adapter-info-file',
+ default='../../build/ohos/inner_kits_adapter.json')
+ args = parser.parse_args()
+
+ if len(args.external_deps) == 0:
+ result = {}
+ write_json_file(args.external_deps_temp_file, result)
+ return 0
+
+ # parts info
+ parts_src_flag = read_json_file(args.parts_src_flag_file)
+ # external deps list
+ external_deps = args.external_deps
+ # sdk base dir
+ sdk_base_dir = args.sdk_base_dir
+ sdk_dir_name = args.sdk_dir_name
+ use_sdk = args.use_sdk
+
+ deps = []
+ libs = []
+ include_dirs = []
+
+ # load inner kits info file
+ inner_kits_info_file = 'build_configs/parts_info/inner_kits_info.json'
+ all_kits_info_dict = read_json_file(inner_kits_info_file)
+ if all_kits_info_dict is None:
+ raise Exception("read pre_build inner_kits_info failed.")
+
+ # load parts variants
+ parts_variants_info_file = 'build_configs/parts_info/parts_variants.json'
+ all_parts_variants_info = read_json_file(parts_variants_info_file)
+ if all_parts_variants_info is None:
+ raise Exception("read pre_build parts_variants failed.")
+
+ # load toolchains info
+ toolchain_variant_info_file = os.path.join('build_configs',
+ 'platforms_info',
+ 'toolchain_to_variant.json')
+ toolchain_variant_info = read_json_file(toolchain_variant_info_file)
+ if toolchain_variant_info is None:
+ raise Exception("read pre_build parts_variants failed.")
+ toolchain_platform = toolchain_variant_info.get('toolchain_platform')
+ current_variant = toolchain_platform.get(args.current_toolchain)
+ if not current_variant:
+ current_variant = 'phone'
+ platform_toolchain = toolchain_variant_info.get('platform_toolchain')
+
+ # compatibility interim
+ _parts_compatibility = _get_inner_kits_adapter_info(
+ args.innerkits_adapter_info_file)
+
+ for external_lib in external_deps:
+ deps_desc = external_lib.split(':')
+ external_part_name = deps_desc[0]
+ external_module_name = deps_desc[1]
+
+ # Usually None; set only when the part has a compatibility alias
+ _adapted_part_name = _parts_compatibility.get(external_part_name)
+
+ # Check whether the part is built from source in this build
+ if not use_sdk and external_part_name in parts_src_flag:
+ external_module_desc_info = _get_external_module_info(
+ all_kits_info_dict, external_part_name, external_module_name,
+ _adapted_part_name)
+ dep_label = external_module_desc_info['label']
+
+ part_variants_info = all_parts_variants_info.get(external_part_name)
+ if part_variants_info is None:
+ raise Exception(
+ "external deps part '{}' variants info is None.".format(
+ external_part_name))
+ toolchain, required_include_dir = get_toolchain(
+ current_variant, part_variants_info.keys(), platform_toolchain)
+ dep_label_with_tc = "{}({})".format(dep_label, toolchain)
+ deps += [dep_label_with_tc]
+
+ if required_include_dir is True and external_module_desc_info.get(
+ 'type') == 'so':
+ include_dir = external_module_desc_info.get('header_base')
+ include_dirs.append(include_dir)
+
+ # sdk prebuilt
+ if external_module_desc_info['prebuilt_enable']:
+ libs += [external_module_desc_info['prebuilt_source']]
+ else:
+ sdk_module_info, adapted_ok = _get_external_module_from_sdk(
+ sdk_base_dir, external_part_name, external_module_name,
+ _adapted_part_name)
+
+ if adapted_ok is True:
+ _external_part_name = _adapted_part_name
+ else:
+ _external_part_name = external_part_name
+ deps += [
+ "//{}/{}:{}".format(sdk_dir_name, _external_part_name,
+ external_module_name)
+ ]
+ # java sdk module does not need to add libs
+ if not (sdk_module_info.get('type')
+ and sdk_module_info.get('type') == 'jar'):
+ external_lib_source = sdk_module_info.get('source')
+ libs += [
+ "//{}/{}/{}".format(sdk_dir_name, _external_part_name,
+ external_lib_source)
+ ]
+
+ result = {}
+ if deps:
+ result['deps'] = deps
+ if libs:
+ result['libs'] = libs
+ if include_dirs:
+ result['include_dirs'] = include_dirs
+
+ write_json_file(args.external_deps_temp_file, result)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
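external_deps_handler.py accepts dependencies as `part_name:module_name` strings and writes back a JSON file with `deps`, `libs`, and `include_dirs`, which the GN templates then read with read_file(). A reduced sketch of the source-tree branch; the inner_kits_info.json shape is inferred from the lookups above, and the concrete labels and values are hypothetical:

```python
import json

# Assumed shape, inferred from the lookups in the script above:
# { part_name: { module_name: { 'label': ..., 'type': ...,
#                               'header_base': ...,
#                               'prebuilt_enable': bool,
#                               'prebuilt_source': ... } } }
inner_kits = {
    'communication': {
        'dsoftbus': {
            'label': '//foundation/communication/dsoftbus:softbus_client',
            'type': 'so',
            'header_base': '//foundation/communication/dsoftbus/interfaces',
            'prebuilt_enable': False,
        }
    }
}


def resolve(external_deps, toolchain):
    result = {'deps': [], 'libs': [], 'include_dirs': []}
    for item in external_deps:
        part, module = item.split(':', 1)
        info = inner_kits[part][module]
        # Pin the dep to the chosen toolchain, as the handler does.
        result['deps'].append('{}({})'.format(info['label'], toolchain))
        if info['prebuilt_enable']:
            result['libs'].append(info['prebuilt_source'])
    # Only non-empty lists are written back, mirroring the handler's output.
    return {k: v for k, v in result.items() if v}


print(json.dumps(resolve(['communication:dsoftbus'],
                         '//build/toolchain/ohos:ohos_clang_arm'), indent=2))
```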
diff --git a/dsoftbus/build/templates/common/get_subsystem_name.py b/dsoftbus/build/templates/common/get_subsystem_name.py
new file mode 100755
index 0000000000000000000000000000000000000000..fc996fed314db3f30b4dbaec754ae4d09ddae1ca
--- /dev/null
+++ b/dsoftbus/build/templates/common/get_subsystem_name.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util import file_utils # noqa: E402
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--part-name', required=True)
+ parser.add_argument('--part-subsystem-info-file', required=False)
+ args = parser.parse_args()
+
+ part_subsystem_info_file = 'build_configs/parts_info/part_subsystem.json'
+ if args.part_subsystem_info_file:
+ part_subsystem_info_file = args.part_subsystem_info_file
+ if not os.path.exists(part_subsystem_info_file):
+ raise Exception(
+ "file '{}' does not exist.".format(part_subsystem_info_file))
+
+ data = file_utils.read_json_file(part_subsystem_info_file)
+ if data is None:
+ raise Exception(
+ "read file '{}' failed.".format(part_subsystem_info_file))
+
+ subsystem_name = data.get(args.part_name)
+ if subsystem_name is None or subsystem_name == '':
+ raise Exception("subsystem name error, part_name='{}'".format(
+ args.part_name))
+ print(subsystem_name)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/templates/common/ohos_templates.gni b/dsoftbus/build/templates/common/ohos_templates.gni
new file mode 100755
index 0000000000000000000000000000000000000000..49090330bc3af688d2431d2b32faf8420dbe6b3a
--- /dev/null
+++ b/dsoftbus/build/templates/common/ohos_templates.gni
@@ -0,0 +1,21 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
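+# A thin wrapper over GN's built-in group(): it only aggregates deps.
+# Hypothetical usage:
+#
+#   ohos_group("default") {
+#     deps = [ ":client", ":server" ]
+#   }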
+template("ohos_group") {
+ group(target_name) {
+ deps = []
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ }
+}
diff --git a/dsoftbus/build/templates/cxx/cxx.gni b/dsoftbus/build/templates/cxx/cxx.gni
new file mode 100755
index 0000000000000000000000000000000000000000..db2a816e61fc31ea5c9f589c62f865b6183e67e1
--- /dev/null
+++ b/dsoftbus/build/templates/cxx/cxx.gni
@@ -0,0 +1,920 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/ohos/notice/notice.gni")
+import("//build/ohos_var.gni")
+import("//build/templates/metadata/module_info.gni")
+
+template("ohos_executable") {
+ assert(!defined(invoker.output_dir),
+ "output_dir is not allowed to be defined.")
+
+ if (defined(invoker.subsystem_name) && defined(invoker.part_name)) {
+ subsystem_name = invoker.subsystem_name
+ part_name = invoker.part_name
+ } else if (defined(invoker.part_name)) {
+ part_name = invoker.part_name
+ _part_subsystem_info_file =
+ "$root_build_dir/build_configs/parts_info/part_subsystem.json"
+ _arguments = [
+ "--part-name",
+ part_name,
+ "--part-subsystem-info-file",
+ rebase_path(_part_subsystem_info_file, root_build_dir),
+ ]
+ get_subsystem_script = "//build/templates/common/get_subsystem_name.py"
+ subsystem_name =
+ exec_script(get_subsystem_script, _arguments, "trim string")
+ } else if (defined(invoker.subsystem_name)) {
+ subsystem_name = invoker.subsystem_name
+ part_name = subsystem_name
+ } else {
+ subsystem_name = "common"
+ part_name = subsystem_name
+ }
+ assert(subsystem_name != "")
+ assert(part_name != "")
+
+ if (check_deps) {
+ deps_data = {
+ }
+ module_label = get_label_info(":${target_name}", "label_with_toolchain")
+ module_deps = []
+ if (defined(invoker.deps)) {
+ foreach(dep, invoker.deps) {
+ module_deps += [ get_label_info(dep, "label_no_toolchain") ]
+ }
+ }
+ module_ex_deps = []
+ if (defined(invoker.external_deps) && invoker.external_deps != []) {
+ module_ex_deps = invoker.external_deps
+ }
+ deps_data = {
+ part_name = part_name
+ module_label = module_label
+ deps = module_deps
+ external_deps = module_ex_deps
+ }
+
+ write_file("${root_out_dir}/deps_files/${part_name}__${target_name}.json",
+ deps_data,
+ "json")
+ }
+
+ _ohos_test = false
+ if (defined(invoker.ohos_test) && invoker.ohos_test) {
+ output_dir = invoker.test_output_dir
+ _ohos_test = true
+ } else {
+ output_dir = "${root_out_dir}/${subsystem_name}/${part_name}"
+ }
+
+ _sanitize_config_target = "${target_name}__sanitizer_config"
+ ohos_sanitizer_config(_sanitize_config_target) {
+ forward_variables_from(invoker,
+ [
+ "cfi",
+ "scs",
+ "scudo",
+ "ubsan",
+ "boundary_sanitize",
+ "integer_overflow_sanitize",
+ ])
+ }
+
+ _test_target = defined(invoker.testonly) && invoker.testonly
+ if (!_test_target) {
+ _main_target_name = target_name
+ _notice_target = "${_main_target_name}__notice"
+ collect_notice(_notice_target) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "license_as_sources",
+ "license_file",
+ ])
+
+ module_name = _main_target_name
+ module_source_dir = get_label_info(":${_main_target_name}", "dir")
+ }
+ }
+ target_label = get_label_info(":${target_name}", "label_with_toolchain")
+ target_toolchain = get_label_info(target_label, "toolchain")
+ executable("${target_name}") {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "configs",
+ "remove_configs",
+ "static_link",
+ "external_deps",
+ "install_images",
+ "module_install_dir",
+ "relative_install_dir",
+ "symlink_target_name",
+ "output_dir",
+ "install_enable",
+ "license_file",
+ "license_as_sources",
+ "use_exceptions",
+
+ # Sanitizer variables
+ "cfi",
+ "scs",
+ "scudo",
+ "ubsan",
+ "boundary_sanitize",
+ "integer_overflow_sanitize",
+ ])
+ output_dir = output_dir
+
+ if (defined(invoker.configs)) {
+ configs += invoker.configs
+ }
+ if (defined(invoker.remove_configs)) {
+ configs -= invoker.remove_configs
+ }
+ configs += [ ":$_sanitize_config_target" ]
+
+ if (defined(invoker.use_exceptions) && invoker.use_exceptions) {
+ configs += [ "//build/config/compiler:exceptions" ]
+ configs -= [ "//build/config/compiler:no_exceptions" ]
+ }
+
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (!defined(libs)) {
+ libs = []
+ }
+ if (!defined(include_dirs)) {
+ include_dirs = []
+ }
+
+ if (defined(invoker.static_link) && invoker.static_link) {
+ no_default_deps = true
+ configs -= [ "//build/config:executable_config" ]
+ if (!defined(ldflags)) {
+ ldflags = []
+ }
+ ldflags += [ "-static" ]
+ if (is_ohos && use_musl) {
+ import("//build/config/ohos/musl.gni")
+ deps += [ "//third_party/musl:${musl_target_abi_name}_static_libs" ]
+ }
+ }
+
+ if (!defined(output_name)) {
+ output_name = target_name
+ }
+
+ if (defined(invoker.external_deps)) {
+ external_deps_script =
+ rebase_path("//build/templates/common/external_deps_handler.py")
+ external_deps_temp_file =
+ "$target_gen_dir/${part_name}__${target_name}_external_deps_temp.json"
+ arguments = [ "--external-deps" ]
+ arguments += invoker.external_deps
+ arguments += [
+ "--parts-src-flag-file",
+ rebase_path(parts_src_flag_file, root_build_dir),
+ "--external-deps-temp-file",
+ rebase_path(external_deps_temp_file, root_build_dir),
+ "--sdk-base-dir",
+ rebase_path("${innersdk_base_dir}", root_build_dir),
+ "--sdk-dir-name",
+ "${innersdk_dir_name}",
+ "--current-toolchain",
+ current_toolchain,
+ ]
+ if (is_use_sdk) {
+ arguments += [ "--use-sdk" ]
+ }
+ exec_script(external_deps_script, arguments)
+
+ external_deps_info = read_file(external_deps_temp_file, "json")
+ if (defined(external_deps_info.deps)) {
+ deps += external_deps_info.deps
+ }
+ if (defined(external_deps_info.libs)) {
+ libs += external_deps_info.libs
+ }
+ if (defined(external_deps_info.include_dirs)) {
+ include_dirs += external_deps_info.include_dirs
+ }
+ }
+
+ if (target_toolchain == "${current_toolchain}") {
+ install_module_info = {
+ module_def = target_label
+ module_info_file =
+ rebase_path(get_label_info(module_def, "target_out_dir"),
+ root_build_dir) + "/${target_name}_module_info.json"
+ subsystem_name = subsystem_name
+ part_name = part_name
+ toolchain = current_toolchain
+ toolchain_out_dir = rebase_path(root_out_dir, root_build_dir)
+ }
+ metadata = {
+ install_modules = [ install_module_info ]
+ }
+ }
+ if (!_test_target) {
+ deps += [ ":$_notice_target" ]
+ }
+ }
+
+ if (!_ohos_test) {
+ ohos_module_name = target_name
+ generate_module_info("${ohos_module_name}_info") {
+ module_name = ohos_module_name
+ module_type = "bin"
+
+ module_source_dir = "$root_out_dir"
+ if (defined(output_dir)) {
+ module_source_dir = output_dir
+ }
+
+ module_install_name = ohos_module_name
+ if (defined(invoker.output_name)) {
+ module_install_name = invoker.output_name
+ }
+
+ module_install_images = [ "system" ]
+ if (defined(invoker.install_images)) {
+ module_install_images = []
+ module_install_images += invoker.install_images
+ }
+
+ module_output_extension = executable_extension
+ if (defined(invoker.output_extension)) {
+ module_output_extension = "." + invoker.output_extension
+ }
+
+ if (is_double_framework) {
+ install_enable = false
+ } else {
+ install_enable = true
+ }
+ if (defined(invoker.install_enable)) {
+ install_enable = invoker.install_enable
+ }
+
+ if (defined(invoker.module_install_dir)) {
+ module_install_dir = invoker.module_install_dir
+ }
+
+ if (defined(invoker.relative_install_dir)) {
+ relative_install_dir = invoker.relative_install_dir
+ }
+
+ if (defined(invoker.symlink_target_name)) {
+ symlink_target_name = invoker.symlink_target_name
+ }
+ notice = "$target_out_dir/$ohos_module_name.notice.txt"
+ }
+ }
+}
+
+# Defines a shared library.
+#
+# The ohos_shared_library template is used to generate a shared object (.so)
+# file.
+#
+# Parameters
+#
+# subsystem_name (required)
+# [string]
+# configs (optional)
+# [list]
+# remove_configs (optional)
+# [list]
+# version_script (optional)
+# [string]
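+#
+# Example (hypothetical):
+#
+#   ohos_shared_library("softbus_client") {
+#     sources = [ "client.cpp" ]
+#     part_name = "dsoftbus"
+#     subsystem_name = "communication"
+#   }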
+template("ohos_shared_library") {
+ assert(!defined(invoker.output_dir),
+ "output_dir is not allowed to be defined.")
+
+ if (defined(invoker.subsystem_name) && defined(invoker.part_name)) {
+ subsystem_name = invoker.subsystem_name
+ part_name = invoker.part_name
+ } else if (defined(invoker.part_name)) {
+ part_name = invoker.part_name
+ _part_subsystem_info_file =
+ "$root_build_dir/build_configs/parts_info/part_subsystem.json"
+ _arguments = [
+ "--part-name",
+ part_name,
+ "--part-subsystem-info-file",
+ rebase_path(_part_subsystem_info_file, root_build_dir),
+ ]
+ get_subsystem_script = "//build/templates/common/get_subsystem_name.py"
+ subsystem_name =
+ exec_script(get_subsystem_script, _arguments, "trim string")
+ } else if (defined(invoker.subsystem_name)) {
+ subsystem_name = invoker.subsystem_name
+ part_name = subsystem_name
+ } else {
+ subsystem_name = "common"
+ part_name = subsystem_name
+ }
+ assert(subsystem_name != "")
+ assert(part_name != "")
+
+ if (check_deps) {
+ deps_data = {
+ }
+ module_label = get_label_info(":${target_name}", "label_with_toolchain")
+ module_deps = []
+ if (defined(invoker.deps)) {
+ foreach(dep, invoker.deps) {
+ module_deps += [ get_label_info(dep, "label_no_toolchain") ]
+ }
+ }
+ module_ex_deps = []
+ if (defined(invoker.external_deps) && invoker.external_deps != []) {
+ module_ex_deps = invoker.external_deps
+ }
+ deps_data = {
+ part_name = part_name
+ module_label = module_label
+ deps = module_deps
+ external_deps = module_ex_deps
+ }
+ write_file("${root_out_dir}/deps_files/${part_name}__${target_name}.json",
+ deps_data,
+ "json")
+ }
+
+ output_dir = "${root_out_dir}/${subsystem_name}/${part_name}"
+
+ _sanitize_config_target = "${target_name}__sanitizer_config"
+ ohos_sanitizer_config(_sanitize_config_target) {
+ forward_variables_from(invoker,
+ [
+ "cfi",
+ "scs",
+ "scudo",
+ "ubsan",
+ "boundary_sanitize",
+ "integer_overflow_sanitize",
+ ])
+ }
+
+ _test_target = defined(invoker.testonly) && invoker.testonly
+ if (!_test_target) {
+ _notice_target = "${target_name}__notice"
+ _main_target_name = target_name
+ collect_notice(_notice_target) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "license_as_sources",
+ "license_file",
+ ])
+
+ module_name = _main_target_name
+ module_source_dir = get_label_info(":${_main_target_name}", "dir")
+ }
+ }
+
+ target_label = get_label_info(":${target_name}", "label_with_toolchain")
+ target_toolchain = get_label_info(target_label, "toolchain")
+ shared_library("${target_name}") {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "configs",
+ "remove_configs",
+ "no_default_deps",
+ "external_deps",
+ "install_images",
+ "module_install_dir",
+ "relative_install_dir",
+ "symlink_target_name",
+ "output_dir",
+ "install_enable",
+ "version_script",
+ "license_file",
+ "license_as_sources",
+ "use_exceptions",
+
+ # Sanitizer variables
+ "cfi",
+ "scs",
+ "scudo",
+ "ubsan",
+ "boundary_sanitize",
+ "integer_overflow_sanitize",
+ ])
+ output_dir = output_dir
+
+ if (!defined(inputs)) {
+ inputs = []
+ }
+
+ if (!defined(ldflags)) {
+ ldflags = []
+ }
+
+ if (defined(invoker.configs)) {
+ configs += invoker.configs
+ }
+ if (defined(invoker.remove_configs)) {
+ configs -= invoker.remove_configs
+ }
+
+ configs += [ ":$_sanitize_config_target" ]
+
+ if (defined(invoker.use_exceptions) && invoker.use_exceptions) {
+ configs += [ "//build/config/compiler:exceptions" ]
+ configs -= [ "//build/config/compiler:no_exceptions" ]
+ }
+
+ if (!defined(output_name)) {
+ output_name = target_name
+ }
+
+ if (defined(invoker.no_default_deps)) {
+ no_default_deps = invoker.no_default_deps
+ }
+
+ if (defined(invoker.version_script)) {
+ _version_script = rebase_path(invoker.version_script, root_build_dir)
+ inputs += [ invoker.version_script ]
+ ldflags += [ "-Wl,--version-script=${_version_script}" ]
+ }
+
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (!_test_target) {
+ deps += [ ":$_notice_target" ]
+ }
+ if (!defined(libs)) {
+ libs = []
+ }
+ if (!defined(include_dirs)) {
+ include_dirs = []
+ }
+ if (defined(invoker.external_deps)) {
+ external_deps_script =
+ rebase_path("//build/templates/common/external_deps_handler.py")
+ external_deps_temp_file =
+ "$target_gen_dir/${part_name}__${target_name}_external_deps_temp.json"
+ arguments = [ "--external-deps" ]
+ arguments += invoker.external_deps
+ arguments += [
+ "--parts-src-flag-file",
+ rebase_path(parts_src_flag_file, root_build_dir),
+ "--external-deps-temp-file",
+ rebase_path(external_deps_temp_file, root_build_dir),
+ "--sdk-base-dir",
+ rebase_path("${innersdk_base_dir}", root_build_dir),
+ "--sdk-dir-name",
+ "${innersdk_dir_name}",
+ "--current-toolchain",
+ current_toolchain,
+ ]
+ if (is_use_sdk) {
+ arguments += [ "--use-sdk" ]
+ }
+
+ exec_script(external_deps_script, arguments, "string")
+
+ external_deps_info = read_file(external_deps_temp_file, "json")
+ if (defined(external_deps_info.deps)) {
+ deps += external_deps_info.deps
+ }
+ if (defined(external_deps_info.libs)) {
+ libs += external_deps_info.libs
+ }
+ if (defined(external_deps_info.include_dirs)) {
+ include_dirs += external_deps_info.include_dirs
+ }
+ }
+
+ #if (target_toolchain == "${current_toolchain}") {
+ install_module_info = {
+ module_def = target_label
+ module_info_file =
+ rebase_path(get_label_info(module_def, "target_out_dir"),
+ root_build_dir) + "/${target_name}_module_info.json"
+ subsystem_name = subsystem_name
+ part_name = part_name
+ toolchain = current_toolchain
+ toolchain_out_dir = rebase_path(root_out_dir, root_build_dir)
+ }
+ metadata = {
+ install_modules = [ install_module_info ]
+ }
+
+ #}
+ }
+
+ if (target_toolchain == "${current_toolchain}") {
+ ohos_module_name = target_name
+ generate_module_info("${ohos_module_name}_info") {
+ module_name = ohos_module_name
+ module_type = "lib"
+ module_source_dir = "$root_out_dir"
+ if (defined(output_dir)) {
+ module_source_dir = output_dir
+ }
+
+ module_install_name = ohos_module_name
+ if (defined(invoker.output_name)) {
+ module_install_name = invoker.output_name
+ }
+
+ module_install_images = [ "system" ]
+ if (defined(invoker.install_images)) {
+ module_install_images = []
+ module_install_images += invoker.install_images
+ }
+
+ module_output_extension = shlib_extension
+ if (defined(invoker.output_extension)) {
+ module_output_extension = "." + invoker.output_extension
+ }
+
+ install_enable = true
+ if (defined(invoker.install_enable)) {
+ install_enable = invoker.install_enable
+ }
+
+ if (defined(invoker.module_install_dir)) {
+ module_install_dir = invoker.module_install_dir
+ }
+
+ if (defined(invoker.relative_install_dir)) {
+ relative_install_dir = invoker.relative_install_dir
+ }
+
+ if (defined(invoker.symlink_target_name)) {
+ symlink_target_name = invoker.symlink_target_name
+ }
+
+ if (defined(invoker.output_prefix_override)) {
+ output_prefix_override = invoker.output_prefix_override
+ }
+ notice = "$target_out_dir/$ohos_module_name.notice.txt"
+ }
+ }
+}
+
+template("ohos_static_library") {
+ if (defined(invoker.subsystem_name) && defined(invoker.part_name)) {
+ subsystem_name = invoker.subsystem_name
+ part_name = invoker.part_name
+ } else if (defined(invoker.part_name)) {
+ part_name = invoker.part_name
+ _part_subsystem_info_file =
+ "$root_build_dir/build_configs/parts_info/part_subsystem.json"
+ _arguments = [
+ "--part-name",
+ part_name,
+ "--part-subsystem-info-file",
+ rebase_path(_part_subsystem_info_file, root_build_dir),
+ ]
+ get_subsystem_script = "//build/templates/common/get_subsystem_name.py"
+ subsystem_name =
+ exec_script(get_subsystem_script, _arguments, "trim string")
+ } else if (defined(invoker.subsystem_name)) {
+ subsystem_name = invoker.subsystem_name
+ part_name = subsystem_name
+ } else {
+ subsystem_name = "common"
+ part_name = subsystem_name
+ }
+ assert(subsystem_name != "")
+ assert(part_name != "")
+
+ if (check_deps) {
+ deps_data = {
+ }
+ module_label = get_label_info(":${target_name}", "label_with_toolchain")
+ module_deps = []
+ if (defined(invoker.deps)) {
+ foreach(dep, invoker.deps) {
+ module_deps += [ get_label_info(dep, "label_no_toolchain") ]
+ }
+ }
+ module_ex_deps = []
+ if (defined(invoker.external_deps) && invoker.external_deps != []) {
+ module_ex_deps = invoker.external_deps
+ }
+ deps_data = {
+ part_name = part_name
+ module_label = module_label
+ deps = module_deps
+ external_deps = module_ex_deps
+ }
+
+ write_file("${root_out_dir}/deps_files/${part_name}__${target_name}.json",
+ deps_data,
+ "json")
+ }
+
+ _sanitize_config_target = "${target_name}__sanitizer_config"
+ ohos_sanitizer_config(_sanitize_config_target) {
+ forward_variables_from(invoker,
+ [
+ "cfi",
+ "scs",
+ "scudo",
+ "ubsan",
+ "boundary_sanitize",
+ "integer_overflow_sanitize",
+ ])
+ }
+
+ _test_target = defined(invoker.testonly) && invoker.testonly
+ if (!_test_target) {
+ _notice_target = "${target_name}__notice"
+ _main_target_name = target_name
+ collect_notice(_notice_target) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "license_as_sources",
+ "license_file",
+ ])
+ module_type = "static_library"
+ module_name = _main_target_name
+ module_source_dir = get_label_info(":${_main_target_name}", "dir")
+ }
+ }
+
+ static_library(target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "configs",
+ "remove_configs",
+ "no_default_deps",
+ "external_deps",
+ "license_file",
+ "license_as_sources",
+ "use_exceptions",
+
+ # Sanitizer variables
+ "cfi",
+ "scs",
+ "scudo",
+ "ubsan",
+ "boundary_sanitize",
+ "integer_overflow_sanitize",
+ ])
+ if (defined(invoker.configs)) {
+ configs += invoker.configs
+ }
+ if (defined(invoker.remove_configs)) {
+ configs -= invoker.remove_configs
+ }
+ configs -= [ "//build/config/compiler:thin_archive" ]
+ configs += [ ":$_sanitize_config_target" ]
+
+ if (defined(invoker.use_exceptions) && invoker.use_exceptions) {
+ configs += [ "//build/config/compiler:exceptions" ]
+ configs -= [ "//build/config/compiler:no_exceptions" ]
+ }
+
+ if (defined(invoker.no_default_deps)) {
+ no_default_deps = invoker.no_default_deps
+ }
+
+ if (!defined(deps)) {
+ deps = []
+ }
+ if (!_test_target) {
+ deps += [ ":$_notice_target" ]
+ }
+ if (!defined(libs)) {
+ libs = []
+ }
+ if (!defined(include_dirs)) {
+ include_dirs = []
+ }
+ if (defined(invoker.external_deps)) {
+ external_deps_script =
+ rebase_path("//build/templates/common/external_deps_handler.py")
+ external_deps_temp_file =
+ "$target_gen_dir/${part_name}__${target_name}_external_deps_temp.json"
+ arguments = [ "--external-deps" ]
+ arguments += invoker.external_deps
+ arguments += [
+ "--parts-src-flag-file",
+ rebase_path(parts_src_flag_file, root_build_dir),
+ "--external-deps-temp-file",
+ rebase_path(external_deps_temp_file, root_build_dir),
+ "--sdk-base-dir",
+ rebase_path("${innersdk_base_dir}", root_build_dir),
+ "--sdk-dir-name",
+ "${innersdk_dir_name}",
+ "--current-toolchain",
+ current_toolchain,
+ ]
+ if (is_use_sdk) {
+ arguments += [ "--use-sdk" ]
+ }
+
+ exec_script(external_deps_script, arguments)
+
+ external_deps_info = read_file(external_deps_temp_file, "json")
+ if (defined(external_deps_info.deps)) {
+ deps += external_deps_info.deps
+ }
+ if (defined(external_deps_info.libs)) {
+ libs += external_deps_info.libs
+ }
+ if (defined(external_deps_info.include_dirs)) {
+ include_dirs += external_deps_info.include_dirs
+ }
+ }
+ }
+}
+
+template("ohos_source_set") {
+ if (defined(invoker.subsystem_name) && defined(invoker.part_name)) {
+ subsystem_name = invoker.subsystem_name
+ part_name = invoker.part_name
+ } else if (defined(invoker.part_name)) {
+ part_name = invoker.part_name
+ _part_subsystem_info_file =
+ "$root_build_dir/build_configs/parts_info/part_subsystem.json"
+ _arguments = [
+ "--part-name",
+ part_name,
+ "--part-subsystem-info-file",
+ rebase_path(_part_subsystem_info_file, root_build_dir),
+ ]
+ get_subsystem_script = "//build/templates/common/get_subsystem_name.py"
+ subsystem_name =
+ exec_script(get_subsystem_script, _arguments, "trim string")
+ } else if (defined(invoker.subsystem_name)) {
+ subsystem_name = invoker.subsystem_name
+ part_name = subsystem_name
+ } else {
+ subsystem_name = "common"
+ part_name = subsystem_name
+ }
+ assert(subsystem_name != "")
+ assert(part_name != "")
+
+ if (check_deps) {
+ deps_data = {
+ }
+ module_label = get_label_info(":${target_name}", "label_with_toolchain")
+ module_deps = []
+ if (defined(invoker.deps)) {
+ foreach(dep, invoker.deps) {
+ module_deps += [ get_label_info(dep, "label_no_toolchain") ]
+ }
+ }
+ module_ex_deps = []
+ if (defined(invoker.external_deps) && invoker.external_deps != []) {
+ module_ex_deps = invoker.external_deps
+ }
+ deps_data = {
+ part_name = part_name
+ module_label = module_label
+ deps = module_deps
+ external_deps = module_ex_deps
+ }
+ write_file("${root_out_dir}/deps_files/${part_name}__${target_name}.json",
+ deps_data,
+ "json")
+ }
+
+ _sanitize_config_target = "${target_name}__sanitizer_config"
+ ohos_sanitizer_config(_sanitize_config_target) {
+ forward_variables_from(invoker,
+ [
+ "cfi",
+ "scs",
+ "scudo",
+ "ubsan",
+ "boundary_sanitize",
+ "integer_overflow_sanitize",
+ ])
+ }
+
+ _test_target = defined(invoker.testonly) && invoker.testonly
+ if (!_test_target) {
+ _main_target_name = target_name
+ _notice_target = "${_main_target_name}__notice"
+ collect_notice(_notice_target) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "license_as_sources",
+ "license_file",
+ ])
+
+ module_type = "source_set"
+ module_name = _main_target_name
+ module_source_dir = get_label_info(":${_main_target_name}", "dir")
+ }
+ }
+
+ source_set(target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "configs",
+ "remove_configs",
+ "no_default_deps",
+ "external_deps",
+ "license_file",
+ "license_as_sources",
+ "use_exceptions",
+
+ # Sanitizer variables
+ "cfi",
+ "scs",
+ "scudo",
+ "ubsan",
+ "boundary_sanitize",
+ "integer_overflow_sanitize",
+ ])
+ if (defined(invoker.configs)) {
+ configs += invoker.configs
+ }
+ if (defined(invoker.remove_configs)) {
+ configs -= invoker.remove_configs
+ }
+
+ configs += [ ":$_sanitize_config_target" ]
+
+ if (defined(invoker.use_exceptions) && invoker.use_exceptions) {
+ configs += [ "//build/config/compiler:exceptions" ]
+ configs -= [ "//build/config/compiler:no_exceptions" ]
+ }
+
+ if (defined(invoker.no_default_deps)) {
+ no_default_deps = invoker.no_default_deps
+ }
+
+ if (!defined(deps)) {
+ deps = []
+ }
+
+ if (!_test_target) {
+ deps += [ ":$_notice_target" ]
+ }
+
+ if (!defined(libs)) {
+ libs = []
+ }
+ if (!defined(include_dirs)) {
+ include_dirs = []
+ }
+ if (defined(invoker.external_deps)) {
+ external_deps_script =
+ rebase_path("//build/templates/common/external_deps_handler.py")
+ external_deps_temp_file =
+ "$target_gen_dir/${part_name}__${target_name}_external_deps_temp.json"
+ arguments = [ "--external-deps" ]
+ arguments += invoker.external_deps
+ arguments += [
+ "--parts-src-flag-file",
+ rebase_path(parts_src_flag_file, root_build_dir),
+ "--external-deps-temp-file",
+ rebase_path(external_deps_temp_file, root_build_dir),
+ "--sdk-base-dir",
+ rebase_path("${innersdk_base_dir}", root_build_dir),
+ "--sdk-dir-name",
+ "${innersdk_dir_name}",
+ "--current-toolchain",
+ current_toolchain,
+ ]
+ if (is_use_sdk) {
+ arguments += [ "--use-sdk" ]
+ }
+
+ exec_script(external_deps_script, arguments)
+
+ external_deps_info = read_file(external_deps_temp_file, "json")
+ if (defined(external_deps_info.deps)) {
+ deps += external_deps_info.deps
+ }
+ if (defined(external_deps_info.libs)) {
+ libs += external_deps_info.libs
+ }
+ if (defined(external_deps_info.include_dirs)) {
+ include_dirs += external_deps_info.include_dirs
+ }
+ }
+ }
+}
diff --git a/dsoftbus/build/templates/cxx/prebuilt.gni b/dsoftbus/build/templates/cxx/prebuilt.gni
new file mode 100644
index 0000000000000000000000000000000000000000..06cd7731799cfda9c6fbeb5f892df7df7019b38f
--- /dev/null
+++ b/dsoftbus/build/templates/cxx/prebuilt.gni
@@ -0,0 +1,163 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/templates/common/copy.gni")
+
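+# The templates below wrap ohos_copy so that a prebuilt artifact is copied
+# into the output tree and tagged with the matching module type ("bin",
+# "lib", "etc"). Hypothetical usage:
+#
+#   ohos_prebuilt_etc("my_init_cfg") {
+#     source = "init.cfg"
+#     part_name = "my_part"
+#   }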
+template("ohos_prebuilt_executable") {
+ assert(defined(invoker.source), "source must be defined for ${target_name}.")
+
+ ohos_copy(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+
+ "deps",
+ "public_configs",
+ "subsystem_name",
+ "part_name",
+
+ # For generate_module_info
+ "install_images",
+ "module_install_dir",
+ "relative_install_dir",
+ "symlink_target_name",
+
+ # Open source license related
+ "license_file",
+ "license_as_sources",
+ ])
+ set_sources_assignment_filter([])
+ sources = [ invoker.source ]
+ outputs = [ "${target_out_dir}/${invoker.source}" ]
+ module_type = "bin"
+ prebuilt = true
+ install_enable = false
+ if (defined(invoker.install_enable)) {
+ install_enable = invoker.install_enable
+ }
+ }
+}
+
+template("ohos_prebuilt_shared_library") {
+ assert(defined(invoker.source), "source must be defined for ${target_name}.")
+
+ _copy_output = "${target_out_dir}/${invoker.source}"
+ config("${target_name}__config") {
+ libs = [ _copy_output ]
+ }
+ ohos_copy(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+
+ "deps",
+ "public_configs",
+ "subsystem_name",
+ "part_name",
+
+ # For generate_module_info
+ "install_images",
+ "module_install_dir",
+ "relative_install_dir",
+ "symlink_target_name",
+
+ # Open source license related
+ "license_file",
+ "license_as_sources",
+ ])
+ set_sources_assignment_filter([])
+ sources = [ invoker.source ]
+ outputs = [ _copy_output ]
+ module_type = "lib"
+ prebuilt = true
+ install_enable = true
+ if (defined(invoker.install_enable)) {
+ install_enable = invoker.install_enable
+ }
+ if (!defined(public_configs)) {
+ public_configs = []
+ }
+ public_configs += [ ":${target_name}__config" ]
+ }
+}
+
+template("ohos_prebuilt_static_library") {
+ assert(defined(invoker.source), "source must be defined for ${target_name}.")
+
+ _copy_output = "${target_out_dir}/${invoker.source}"
+ config("${target_name}__config") {
+ libs = [ _copy_output ]
+ }
+ ohos_copy(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+
+ "deps",
+ "public_configs",
+ "subsystem_name",
+ "part_name",
+
+ # Open source license related
+ "license_file",
+ "license_as_sources",
+ ])
+ set_sources_assignment_filter([])
+ sources = [ invoker.source ]
+ outputs = [ _copy_output ]
+ bypass_module_info_generation = true
+ if (!defined(public_configs)) {
+ public_configs = []
+ }
+ public_configs += [ ":${target_name}__config" ]
+ }
+}
+
+template("ohos_prebuilt_etc") {
+ assert(defined(invoker.source), "source must be defined for ${target_name}.")
+
+ ohos_copy(target_name) {
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+
+ "deps",
+ "public_configs",
+ "subsystem_name",
+ "part_name",
+
+ # For generate_module_info
+ "install_images",
+ "module_install_dir",
+ "relative_install_dir",
+ "symlink_target_name",
+
+ # Open source license related
+ "license_file",
+ "license_as_sources",
+ ])
+ set_sources_assignment_filter([])
+ sources = [ invoker.source ]
+ outputs = [ "${target_out_dir}/${invoker.source}" ]
+ module_type = "etc"
+ prebuilt = true
+ install_enable = true
+ if (defined(invoker.install_enable)) {
+ install_enable = invoker.install_enable
+ }
+ }
+}
diff --git a/dsoftbus/build/templates/metadata/gen_module_info.py b/dsoftbus/build/templates/metadata/gen_module_info.py
new file mode 100755
index 0000000000000000000000000000000000000000..fedab0699063ee0c80b9f02ef89431a0fee912a8
--- /dev/null
+++ b/dsoftbus/build/templates/metadata/gen_module_info.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util.file_utils import write_json_file # noqa: E402
+
+
+def get_source_name(module_type, name, prefix_override, suffix,
+ alternative_suffix):
+ """Generate source file name by type."""
+ if (module_type == 'lib'
+ or module_type == 'lib64') and not prefix_override:
+ if not name.startswith('lib'):
+ name = 'lib' + name
+ alias = ''
+ if module_type == 'java_library' and alternative_suffix:
+ alias = '%s%s' % (name, alternative_suffix)
+ if suffix:
+ name = '%s%s' % (name, suffix)
+ if module_type == 'none':
+ name = ''
+ return name, alias
+
+
+def get_type_dir(module_type):
+ """Get the install directory by type."""
+ return module_type
+
+
+def _gen_install_dest(base_dir, module_install_dir, relative_install_dir,
+ module_type):
+ """Generate module install dir by user config."""
+ if module_install_dir != '':
+ _install_dir = os.path.join(base_dir, module_install_dir)
+ elif relative_install_dir != '':
+ _install_dir = os.path.join(base_dir, get_type_dir(module_type),
+ relative_install_dir)
+ else:
+ _install_dir = os.path.join(base_dir, get_type_dir(module_type))
+ return _install_dir
+
+
+def gen_install_dests(system_base_dir, ramdisk_base_dir, vendor_base_dir,
+ updater_base_dir, source_file_name, install_images,
+ module_install_dir, relative_install_dir, module_type):
+ """Generate module install dir by user config."""
+ dests = []
+ if module_type == "none":
+ return dests
+ dest = ''
+ for image in install_images:
+ if image == 'system':
+ dest = _gen_install_dest(system_base_dir, module_install_dir,
+ relative_install_dir, module_type)
+ elif image == 'ramdisk':
+ dest = _gen_install_dest(ramdisk_base_dir, module_install_dir,
+ relative_install_dir, module_type)
+ elif image == 'vendor':
+ dest = _gen_install_dest(vendor_base_dir, module_install_dir,
+ relative_install_dir, module_type)
+ elif image == 'updater':
+ dest = _gen_install_dest(updater_base_dir, module_install_dir,
+ relative_install_dir, module_type)
+ dests.append(os.path.join(dest, source_file_name))
+ return dests
+
+
+def gen_module_info(module_type, module_label, module_name, source_dir,
+ module_source, module_alt_source, install_dests,
+ symlink_target, install_enable, collect, notice):
+ """Generate module install info."""
+ source = os.path.join(source_dir, module_source)
+ data = {
+ 'type': module_type,
+ 'label': module_label,
+ 'label_name': module_name,
+ 'source': source,
+ 'dest': install_dests,
+ 'collect': collect,
+ 'install_enable': install_enable
+ }
+ if notice:
+ data['notice'] = notice
+ if module_type == 'java_library':
+ data['alternative_source'] = os.path.join(source_dir,
+ module_alt_source)
+ if symlink_target:
+ data['symlink'] = symlink_target
+ return data
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--system-base-dir', required=True)
+ parser.add_argument('--ramdisk-base-dir', required=True)
+ parser.add_argument('--vendor-base-dir', required=True)
+ parser.add_argument('--updater-base-dir', required=True)
+ parser.add_argument('--label-name', help='module name', required=True)
+ parser.add_argument('--target-label', help='target label', required=True)
+ parser.add_argument('--type', help='module type', required=True)
+ parser.add_argument('--source-dir', help='', required=True)
+ parser.add_argument('--install-images', nargs='+', help='')
+ parser.add_argument('--install-name', help='', required=False)
+ parser.add_argument('--suffix', help='', required=False)
+ parser.add_argument('--alternative-suffix',
+ help='alternative extension for java library targets',
+ required=False)
+ parser.add_argument('--symlink-target', nargs='+', help='', required=False)
+ parser.add_argument('--output-file', help='', required=True)
+ parser.add_argument('--prebuilt', dest='prebuilt', action='store_true')
+ parser.add_argument('--no-prebuilt', dest='prebuilt', action='store_false')
+ parser.set_defaults(prebuilt=False)
+ parser.add_argument('--module-source', help='', required=False)
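+ # Note: --install-enable is wired to store_false, so passing the flag
+ # disables installation; omitting it leaves install_enable=True.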
+ parser.add_argument('--install-enable',
+ dest='install_enable',
+ action='store_false')
+ parser.set_defaults(install_enable=True)
+ parser.add_argument('--collect', dest='collect', action='store_true')
+ parser.add_argument('--notice', help='path to notice')
+ parser.set_defaults(collect=False)
+ parser.add_argument('--module-install-dir', help='', required=False)
+ parser.add_argument('--relative-install-dir', help='', required=False)
+ parser.add_argument('--prefix-override',
+ dest='prefix_override',
+ action='store_true')
+ parser.add_argument('--no-prefix-override',
+ dest='prefix_override',
+ action='store_false')
+ parser.set_defaults(prefix_override=False)
+ args = parser.parse_args()
+
+ module_source = ''
+ module_alt_source = ''
+ source_file_name = ''
+ if args.prebuilt:
+ source_file_name = os.path.basename(args.module_source)
+ module_source = args.module_source
+ if module_source.startswith("//"):
+ module_source = module_source[2:]
+ else:
+ source_file_name, alt_source_file_name = get_source_name(
+ args.type, args.install_name, args.prefix_override, args.suffix,
+ args.alternative_suffix)
+ module_source = source_file_name
+ module_alt_source = alt_source_file_name
+
+ install_dests = []
+ if args.install_images:
+ install_dests = gen_install_dests(
+ args.system_base_dir, args.ramdisk_base_dir, args.vendor_base_dir,
+ args.updater_base_dir, source_file_name, args.install_images,
+ args.module_install_dir, args.relative_install_dir, args.type)
+
+ module_info_data = gen_module_info(args.type, args.target_label,
+ args.label_name, args.source_dir,
+ module_source, module_alt_source,
+ install_dests, args.symlink_target,
+ args.install_enable, args.collect,
+ args.notice)
+
+ # write module info file
+ write_json_file(args.output_file, module_info_data)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
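The record written by gen_module_info.py is the per-module JSON that the packaging steps consume. Calling its helpers directly shows the shape of that record; the module names and paths below are hypothetical:

```python
import json

# gen_module_info is importable when run from the directory of the script
# above; the keyword arguments mirror its function signatures.
from gen_module_info import gen_install_dests, gen_module_info

dests = gen_install_dests(
    system_base_dir='system', ramdisk_base_dir='ramdisk',
    vendor_base_dir='vendor', updater_base_dir='updater',
    source_file_name='libsoftbus_client.z.so', install_images=['system'],
    module_install_dir='', relative_install_dir='', module_type='lib')

info = gen_module_info(
    module_type='lib',
    module_label='//foundation/communication/dsoftbus:softbus_client',
    module_name='softbus_client', source_dir='communication/dsoftbus',
    module_source='libsoftbus_client.z.so', module_alt_source='',
    install_dests=dests, symlink_target=None, install_enable=True,
    collect=False, notice='')

print(json.dumps(info, indent=2))
# -> {"type": "lib",
#     "label": "//foundation/communication/dsoftbus:softbus_client",
#     "source": "communication/dsoftbus/libsoftbus_client.z.so",
#     "dest": ["system/lib/libsoftbus_client.z.so"], ...}
```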
diff --git a/dsoftbus/build/templates/metadata/module_info.gni b/dsoftbus/build/templates/metadata/module_info.gni
new file mode 100644
index 0000000000000000000000000000000000000000..40a51403ce4dd655c1d4ac590aac8933b8a08d07
--- /dev/null
+++ b/dsoftbus/build/templates/metadata/module_info.gni
@@ -0,0 +1,306 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/ohos_var.gni")
+
+# Generates a module info file for a target.
+# This is a private template; do not use it directly.
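+#
+# A hypothetical caller sketch:
+#   generate_module_info("libfoo_info") {
+#     module_name = "libfoo"
+#     module_type = "lib"
+#     module_install_images = [ "system" ]
+#   }
+#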
+template("generate_module_info") {
+  assert(defined(invoker.module_name),
+         "module_name must be defined for ${target_name}.")
+ assert(defined(invoker.module_type),
+ "module_type must be defined for ${target_name}.")
+
+ forward_variables_from(invoker,
+ [
+ "testonly",
+ "visibility",
+ ])
+ forward_variables_from(invoker,
+ [
+ "module_label",
+ "module_name",
+ "module_source_dir",
+ "module_install_images",
+ "module_type",
+ "module_install_name",
+ "module_install_dir",
+ "relative_install_dir",
+ "module_output_extension",
+ "module_source",
+ "notice",
+ ])
+
+ if (!defined(module_label)) {
+ if (module_type == "java_library" || module_type == "none" ||
+ module_type == "app" || module_type == "dex") {
+ module_label = get_label_info(":$module_name", "label_no_toolchain")
+ } else {
+ module_label = get_label_info(":$module_name", "label_with_toolchain")
+ }
+ }
+ module_deps = [ get_label_info(module_label, "label_no_toolchain") ]
+
+ gen_script = rebase_path("//build/templates/metadata/gen_module_info.py")
+
+ if (!defined(module_source_dir)) {
+ module_source_dir = "$root_out_dir"
+ }
+ if (!defined(module_install_name)) {
+ module_install_name = module_name
+ }
+ if (!defined(module_output_extension)) {
+ module_output_extension = ""
+ }
+
+ output_file = "${target_out_dir}/${module_name}_module_info.json"
+
+  # Remap module_type "lib": 64-bit targets use "lib64", 32-bit keep "lib".
+ if (module_type == "lib") {
+ if (target_cpu == "arm64" || target_cpu == "x86_64") {
+ module_type = "lib64"
+ } else if (target_cpu == "arm" || target_cpu == "x86") {
+ module_type = "lib"
+ } else {
+ assert(false, "Error: unsupported target cpu name.")
+ }
+ }
+
+ if (!is_standard_system && is_asan &&
+ ("${module_type}" == "lib" || "${module_type}" == "lib64" ||
+ "${module_type}" == "bin")) {
+ system_base_dir = "${system_base_dir}/asan"
+ }
+
+ arguments = [
+ "--system-base-dir",
+ system_base_dir,
+ "--ramdisk-base-dir",
+ ramdisk_base_dir,
+ "--vendor-base-dir",
+ vendor_base_dir,
+ "--updater-base-dir",
+ updater_base_dir,
+ "--type",
+ module_type,
+ "--target-label",
+ module_label,
+ "--source-dir",
+ rebase_path(module_source_dir, "$root_build_dir"),
+ "--label-name",
+ module_name,
+ "--install-name",
+ module_install_name,
+ "--output-file",
+ rebase_path(output_file, "$root_build_dir"),
+ ]
+
+ if (defined(invoker.module_alt_output_extension)) {
+ arguments += [
+ "--alternative-suffix",
+ invoker.module_alt_output_extension,
+ ]
+ }
+
+  # --install-enable is a store_false flag in gen_module_info.py, so it is
+  # passed only when installation is disabled.
+  if (defined(invoker.install_enable) && !invoker.install_enable) {
+    arguments += [ "--install-enable" ]
+  }
+ if (defined(invoker.collect) && invoker.collect) {
+ arguments += [ "--collect" ]
+ }
+
+ if (defined(module_install_images)) {
+ if (!defined(module_install_dir)) {
+ module_install_dir = ""
+ }
+ if (!defined(relative_install_dir)) {
+ relative_install_dir = ""
+ }
+ arguments += [
+ "--module-install-dir",
+ module_install_dir,
+ "--relative-install-dir",
+ relative_install_dir,
+ "--install-images",
+ ]
+ arguments += module_install_images
+ }
+
+  # For prebuilt modules, pass --prebuilt along with the module source path.
+ if (defined(invoker.prebuilt) && invoker.prebuilt) {
+ arguments += [
+ "--prebuilt",
+ "--module-source",
+ module_source,
+ ]
+ } else {
+ arguments += [
+ "--install-name",
+ module_install_name,
+ ]
+ }
+
+ if (module_output_extension != "") {
+ arguments += [
+ "--suffix",
+ module_output_extension,
+ ]
+ }
+
+ if (defined(invoker.symlink_target_name)) {
+ arguments += [ "--symlink-target" ]
+ arguments += invoker.symlink_target_name
+ }
+
+ if (defined(invoker.output_prefix_override) &&
+ invoker.output_prefix_override) {
+ arguments += [ "--prefix-override" ]
+ }
+ if (defined(notice)) {
+ arguments += [
+ "--notice",
+ rebase_path(notice, root_build_dir),
+ ]
+ }
+
+ exec_script(gen_script, arguments)
+
+ generated_file("${target_name}") {
+ outputs = [ "${target_out_dir}/${module_name}_install_info.json" ]
+ data_keys = [ "install_modules" ]
+ output_conversion = "json"
+ deps = module_deps
+ }
+}
+
+template("write_meta_data") {
+ assert(defined(invoker.meta_data), "meta_data must be defined")
+ assert(defined(invoker.type), "type must be defined")
+ forward_variables_from(invoker, [ "testonly" ])
+
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "deps",
+ "meta_data",
+ "type",
+ "resources",
+ "assets",
+ "hap_path",
+ "hap_profile",
+ "package_name",
+ ])
+ if (type != "js_assets" && type != "assets" && type != "resources" &&
+ type != "hap") {
+ assert(false, "type must be one of [js_assets, assets, resources, hap]")
+ }
+ if (type != "hap") {
+ _parent_invoker = invoker.invoker
+ _caller_target_name = get_label_info(":${_parent_invoker.target_name}",
+ "label_no_toolchain")
+ set_sources_assignment_filter(app_target_allowlist)
+ sources = [ _caller_target_name ]
+    if (sources != []) {
+      if (type == "js_assets" || type == "assets") {
+        assert(
+            false,
+            "Illegal target name: \"${_caller_target_name}\", the target name of ohos_js_assets or ohos_assets must end with \"assets\" or \"asset\"")
+      } else if (type == "resources") {
+        assert(
+            false,
+            "Illegal target name: \"${_caller_target_name}\", the target name of ohos_resources must end with \"res\", \"resources\" or \"resource\"")
+      }
+    }
+ set_sources_assignment_filter([])
+ }
+
+ script = "//build/templates/metadata/write_meta_data.py"
+ outputs = [ meta_data ]
+ depfile = "$target_gen_dir/$target_name.d"
+
+ args = [
+ "--output",
+ rebase_path(meta_data, root_build_dir),
+ "--type",
+ type,
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ ]
+
+ if (!defined(deps)) {
+ deps = []
+ }
+
+ _possible_deps = []
+ if (defined(invoker.possible_deps)) {
+ _possible_deps = invoker.possible_deps
+ }
+
+ _deps_metadata = []
+ inputs = []
+ set_sources_assignment_filter(app_target_allowlist)
+ foreach(d, _possible_deps) {
+ sources = []
+ _target_label = get_label_info(d, "label_no_toolchain")
+ sources += [ _target_label ]
+ if (sources == []) {
+ _deps_metadata += [ get_label_info(d, "target_gen_dir") + "/" +
+ get_label_info(d, "name") + ".metadata" ]
+ deps += [ "${_target_label}__metadata" ]
+ }
+ }
+ set_sources_assignment_filter([])
+ if (_deps_metadata != []) {
+ args += [ "--deps-metadata" ]
+ foreach(d, _deps_metadata) {
+ args += [ rebase_path(d, root_build_dir) ]
+ }
+ inputs += _deps_metadata
+ }
+ if (defined(resources)) {
+ args += [ "--resources" ]
+ foreach(res, resources) {
+ args += [ rebase_path(res, root_build_dir) ]
+ }
+ if (defined(package_name)) {
+ args += [
+ "--package-name",
+ package_name,
+ ]
+ }
+ }
+
+ if (defined(hap_profile)) {
+ args += [
+ "--hap-profile",
+ rebase_path(hap_profile, root_build_dir),
+ ]
+ }
+
+ if (defined(assets)) {
+ args += [ "--assets" ]
+ foreach(asset, assets) {
+ args += [ rebase_path(asset, root_build_dir) ]
+ }
+ }
+ if (defined(hap_path)) {
+ args += [
+ "--hap-path",
+ rebase_path(hap_path, root_build_dir),
+ ]
+ }
+ }
+}
diff --git a/dsoftbus/build/templates/metadata/write_meta_data.py b/dsoftbus/build/templates/metadata/write_meta_data.py
new file mode 100755
index 0000000000000000000000000000000000000000..19725a6f79cf88dfe8ae822a695f23222f4f151c
--- /dev/null
+++ b/dsoftbus/build/templates/metadata/write_meta_data.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import argparse
+import os
+import json
+import copy
+
+sys.path.append(
+ os.path.dirname(os.path.dirname(os.path.dirname(
+ os.path.abspath(__file__)))))
+from scripts.util import build_utils
+
+deps_metadata_cache = {}
+
+
+def get_all_deps(direct_deps):
+ if direct_deps == []:
+ return []
+ deps = copy.deepcopy(direct_deps)
+ all_deps = set()
+ all_deps.update(deps)
+ while len(deps) > 0:
+ dep = deps.pop(0)
+        if dep not in deps_metadata_cache:
+            with open(dep) as dep_fp:
+                deps_metadata_cache[dep] = json.load(dep_fp).get('root')
+        for n in deps_metadata_cache[dep].get('metadata_deps'):
+ if n not in all_deps:
+ deps.append(n)
+ all_deps.add(n)
+ return sorted(list(all_deps))
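+
+# Illustration: if a.metadata's root has metadata_deps = ['b.metadata'] and
+# b.metadata's root has metadata_deps = [], then
+# get_all_deps(['a.metadata']) returns ['a.metadata', 'b.metadata'].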
+
+
+def get_deps_metadata(deps):
+    return [deps_metadata_cache[d] for d in deps]
+
+
+def get_deps_of_type(deps_data, target_type):
+ return [d for d in deps_data if d['type'] == target_type]
+
+
+class Deps(object):
+ def __init__(self, direct_deps):
+ self.direct_deps = direct_deps
+ self.all_deps = get_all_deps(direct_deps)
+ self.all_deps_data = get_deps_metadata(self.all_deps)
+
+ def All(self, target_type):
+ return get_deps_of_type(self.all_deps_data, target_type)
+
+
+def write_meta_data(options, direct_deps):
+ meta_data = {
+ 'root': {
+ 'type': options.type,
+ 'metadata_path': options.output,
+ 'metadata_deps': direct_deps
+ }
+ }
+ root = meta_data['root']
+ if options.type == 'js_assets' or options.type == 'assets':
+ root[options.type] = options.assets
+ if options.type == 'resources':
+ root[options.type] = options.resources
+ package_name = options.package_name
+ root['package_name'] = package_name if package_name else ""
+ if options.type == 'hap':
+ deps = Deps(direct_deps)
+ root['hap_path'] = options.hap_path
+ for target_type in ['js_assets', 'assets', 'resources']:
+ root[target_type] = []
+ for d in deps.All(target_type):
+ root[target_type].extend(d[target_type])
+ if options.type == 'hap' or options.type == 'resources':
+ hap_profile = options.hap_profile
+ root['hap_profile'] = hap_profile if hap_profile else ""
+ build_utils.write_json(meta_data, options.output, only_if_changed=True)
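+
+# For a 'hap' target the file written above looks roughly like this
+# (illustrative values):
+#     {"root": {"type": "hap", "metadata_path": "gen/foo.metadata",
+#               "metadata_deps": [...], "hap_path": "out/foo.hap",
+#               "js_assets": [...], "assets": [...], "resources": [...],
+#               "hap_profile": "config.json"}}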
+
+
+def main():
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument('--output',
+ help='output meta data file',
+ required=True)
+ parser.add_argument('--type', help='type of module', required=True)
+ parser.add_argument('--assets', nargs='+', help='assets directory')
+ parser.add_argument('--resources', nargs='+', help='resources directory')
+ parser.add_argument('--hap-path', help='path to output hap')
+ parser.add_argument('--depfile', help='path to .d file')
+ parser.add_argument('--deps-metadata', nargs="+", help='metadata deps')
+ parser.add_argument('--package-name',
+ help='package name for hap resources')
+ parser.add_argument('--hap-profile', help='path to hap profile')
+ options = parser.parse_args()
+ direct_deps = options.deps_metadata if options.deps_metadata else []
+
+ possible_input_strings = [
+ options.type, options.assets, options.resources, options.hap_path,
+ options.hap_profile, options.package_name
+ ]
+ input_strings = [x for x in possible_input_strings if x]
+
+ build_utils.call_and_write_depfile_if_stale(
+ lambda: write_meta_data(options, direct_deps),
+ options,
+ depfile_deps=direct_deps,
+ input_paths=direct_deps,
+ input_strings=input_strings,
+ output_paths=([options.output]),
+ force=False,
+ add_pydeps=False)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/templates/metadata/write_meta_data.pydeps b/dsoftbus/build/templates/metadata/write_meta_data.pydeps
new file mode 100644
index 0000000000000000000000000000000000000000..b55c57763dd91872fa73f716f84b74f9aa59eada
--- /dev/null
+++ b/dsoftbus/build/templates/metadata/write_meta_data.pydeps
@@ -0,0 +1,9 @@
+# Generated by running:
+# build/print_python_deps.py --root build/templates/metadata --output build/templates/metadata/write_meta_data.pydeps build/templates/metadata/write_meta_data.py
+../../gn_helpers.py
+../../scripts/__init__.py
+../../scripts/util/__init__.py
+../../scripts/util/build_utils.py
+../../scripts/util/md5_check.py
+../../scripts/util/pycache.py
+write_meta_data.py
diff --git a/dsoftbus/build/test.gni b/dsoftbus/build/test.gni
new file mode 100755
index 0000000000000000000000000000000000000000..c20379a9b1426b4f5a9a0cd9b28e1a0272d09bbf
--- /dev/null
+++ b/dsoftbus/build/test.gni
@@ -0,0 +1,432 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/config/python.gni")
+import("//build/ohos.gni")
+import("//build/ohos_var.gni")
+import("//build/templates/cxx/cxx.gni")
+
+template("_testcase_resources") {
+ assert(defined(invoker.testcase_target_name))
+ assert(defined(invoker.test_output_dir))
+ assert(defined(invoker.module_out_path))
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps += invoker.deps
+ }
+ action_with_pydeps(target_name) {
+ if (defined(invoker.testonly)) {
+ testonly = invoker.testonly
+ }
+ deps = _deps
+ inputs = []
+ script = "//build/ohos/testfwk/testcase_resource_copy.py"
+ output_file = "$target_out_dir/$target_name.json"
+ outputs = [ output_file ]
+ depfile = "$target_gen_dir/$target_name.d"
+ args = []
+ if (defined(invoker.resource_config_file)) {
+ args += [
+ "--resource-config-file",
+ rebase_path(invoker.resource_config_file, root_build_dir),
+ ]
+ inputs += [ invoker.resource_config_file ]
+ }
+ args += [
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--testcase-target-name",
+ invoker.testcase_target_name,
+ "--part-build-out-path",
+ rebase_path(root_out_dir, root_build_dir),
+ "--resource-output-path",
+ rebase_path(invoker.test_output_dir + "/resource", root_build_dir),
+ "--module-out-path",
+ invoker.module_out_path,
+ "--output-file",
+ rebase_path(output_file, root_build_dir),
+ ]
+ }
+}
+
+# ohos test template
+template("_ohos_test") {
+ assert(defined(invoker.test_type), "test_type is required.")
+ assert(defined(invoker.module_out_path))
+
+ _deps = []
+ if (defined(invoker.deps)) {
+ _deps += invoker.deps
+ }
+
+ # generate module list file in gn stage
+ _gen_module_list_script = "//build/ohos/testfwk/gen_module_list_files.py"
+ _arguments = [
+ "--target",
+ target_name,
+ "--target_label",
+ get_label_info(":$target_name", "label_with_toolchain"),
+ "--source_dir",
+ rebase_path(get_label_info(":$target_name", "dir"), root_build_dir),
+ "--test_type",
+ invoker.test_type,
+ ]
+
+ test_output_dir =
+ "$root_out_dir/tests/${invoker.test_type}/${invoker.module_out_path}"
+ _module_list_file = "$root_out_dir/module_list_files/${invoker.module_out_path}/$target_name.mlf"
+
+ _arguments += [
+ "--output_dir",
+ rebase_path(test_output_dir, root_build_dir),
+ "--module_list_file",
+ rebase_path(_module_list_file, root_build_dir),
+ ]
+ exec_script(_gen_module_list_script, _arguments)
+
+ # copy testcase resource
+ testcase_target_name = target_name
+ _testcase_resources("${testcase_target_name}_resource_copy") {
+ if (defined(invoker.resource_config_file)) {
+ resource_config_file = invoker.resource_config_file
+ }
+ module_out_path = invoker.module_out_path
+ deps = _deps
+ testonly = true
+ }
+
+ _has_sources = defined(invoker.sources) && invoker.sources != []
+ if (_has_sources) {
+ _c_sources_file = "$target_gen_dir/$target_name.sources"
+ write_file(_c_sources_file, rebase_path(invoker.sources, root_build_dir))
+ }
+
+ ohos_executable(target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "test_type",
+ "module_out_path",
+ "visibility",
+ ])
+ forward_variables_from(invoker, [ "visibility" ])
+ if (!defined(deps)) {
+ deps = []
+ }
+ deps += [ ":${testcase_target_name}_resource_copy" ]
+
+ subsystem_name = "tests"
+ part_name = invoker.test_type
+ ohos_test = true
+ testonly = true
+ output_name = "$target_name"
+ }
+}
+
+template("ohos_unittest") {
+ _ohos_test(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "unittest"
+ deps = []
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ deps += [ "//third_party/googletest:gtest_main" ]
+ }
+}
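+
+# A minimal caller sketch (hypothetical target and paths):
+#   ohos_unittest("foo_unittest") {
+#     module_out_path = "subsystem/part"
+#     sources = [ "foo_unittest.cpp" ]
+#   }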
+
+template("ohos_moduletest") {
+ _ohos_test(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "moduletest"
+ deps = []
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ deps += [ "//third_party/googletest:gtest_main" ]
+ }
+}
+
+template("ohos_systemtest") {
+ _ohos_test(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "systemtest"
+ }
+}
+
+template("ohos_performancetest") {
+ _ohos_test(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "performance"
+ deps = []
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ deps += [
+ "//test/developertest/aw/cxx/hwext:performance_test_static",
+ "//third_party/googletest:gtest_main",
+ ]
+ }
+}
+
+template("ohos_securitytest") {
+ _ohos_test(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "security"
+ }
+}
+
+template("ohos_reliabilitytest") {
+ _ohos_test(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "reliability"
+ }
+}
+
+template("ohos_distributedtest") {
+ _ohos_test(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "distributedtest"
+ deps = []
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ deps += [ "//test/developertest/aw/cxx/distributed:distributedtest_lib" ]
+ }
+}
+
+template("ohos_fuzztest") {
+ _ohos_test(target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "deps",
+ "cflags",
+ ])
+ test_type = "fuzztest"
+ deps = []
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ cflags = []
+ if (defined(invoker.cflags)) {
+ cflags += invoker.cflags
+ }
+ cflags += [
+ "-fno-sanitize-coverage=trace-pc-guard,edge,trace-cmp,indirect-calls,8bit-counters",
+ "-fsanitize=fuzzer",
+ ]
+ deps += [ "//test/developertest/libs/fuzzlib:libfuzzer" ]
+ }
+}
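+
+# Targets built with this template are compiled with -fsanitize=fuzzer, so
+# each test is expected to provide the standard libFuzzer entry point, e.g.:
+#   extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size);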
+
+template("ohos_benchmarktest") {
+ _ohos_test(target_name) {
+ forward_variables_from(invoker, "*", [ "deps" ])
+ test_type = "benchmark"
+
+ deps = []
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+ deps += [ "//third_party/benchmark:benchmark" ]
+ }
+}
+
+template("_test_py_file_copy") {
+ assert(defined(invoker.sources), "sources is required.")
+ assert(defined(invoker.target_base_dir))
+ assert(defined(invoker.copy_output_dir))
+
+ action_with_pydeps(target_name) {
+ forward_variables_from(invoker,
+ [
+ "sources",
+ "testonly",
+ "visibility",
+ ])
+ script = "//build/ohos/testfwk/test_py_file_copy.py"
+ deps = []
+ if (defined(invoker.deps)) {
+ deps += invoker.deps
+ }
+
+ depfile = "$target_gen_dir/$target_name.d"
+ outfile = "$target_out_dir/$target_name.out"
+ outputs = [ outfile ]
+ args = [
+ "--target-base-dir",
+ rebase_path(invoker.target_base_dir, root_build_dir),
+ "--copy-output-dir",
+ rebase_path(invoker.copy_output_dir, root_build_dir),
+ "--outfile",
+ rebase_path(outfile, root_build_dir),
+ "--depfile",
+ rebase_path(depfile, root_build_dir),
+ "--source-files",
+ ]
+ args += rebase_path(sources, root_build_dir)
+ }
+}
+
+# python test template
+template("_ohos_test_py") {
+ assert(defined(invoker.test_type), "test_type is required.")
+ assert(defined(invoker.sources), "sources is required.")
+
+ _gen_module_list_script = "//build/ohos/testfwk/gen_module_list_files.py"
+ _arguments = [
+ "--target",
+ target_name,
+ "--target_label",
+ get_label_info(":$target_name", "label_with_toolchain"),
+ "--source_dir",
+ rebase_path(get_label_info(":$target_name", "dir"), root_build_dir),
+ "--test_type",
+ invoker.test_type,
+ ]
+
+ if (defined(invoker.module_out_path)) {
+ test_output_dir =
+ "$root_out_dir/tests/${invoker.test_type}/${invoker.module_out_path}"
+ _module_list_file = "$root_out_dir/module_list_files/${invoker.module_out_path}/$target_name.mlf"
+ } else {
+ test_output_dir = "$root_out_dir/tests/${invoker.test_type}"
+ _module_list_file = "$root_out_dir/module_list_files/$target_name.mlf"
+ }
+
+ _arguments += [
+ "--output_dir",
+ rebase_path(test_output_dir, root_build_dir),
+ "--module_list_file",
+ rebase_path(_module_list_file, root_build_dir),
+ ]
+ exec_script(_gen_module_list_script, _arguments)
+
+ _test_py_file_copy(target_name) {
+ testonly = true
+ target_base_dir = get_label_info(":$target_name", "dir")
+ copy_output_dir = test_output_dir
+ sources = get_path_info(invoker.sources, "abspath")
+ }
+}
+
+template("ohos_unittest_py") {
+ _ohos_test_py(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "unittest"
+ }
+}
+
+template("ohos_moduletest_py") {
+ _ohos_test_py(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "moduletest"
+ }
+}
+
+template("ohos_systemtest_py") {
+ _ohos_test_py(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "systemtest"
+ }
+}
+
+template("ohos_performancetest_py") {
+ _ohos_test_py(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "performance"
+ }
+}
+
+template("ohos_securitytest_py") {
+ _ohos_test_py(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "security"
+ }
+}
+
+template("ohos_reliabilitytest_py") {
+ _ohos_test_py(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "reliability"
+ }
+}
+
+template("ohos_distributedtest_py") {
+ _ohos_test_py(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "distributedtest"
+ }
+}
+
+template("ohos_fuzztest_py") {
+ _ohos_test_py(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "fuzztest"
+ }
+}
+
+# JS API test template
+template("ohos_js_test") {
+ assert(defined(invoker.test_type), "test_type must be defined.")
+ assert(defined(invoker.hap_profile), "hap_profile must be defined.")
+ assert(defined(invoker.module_out_path), "module_out_path must be defined.")
+
+ # generate module list file in gn stage
+ _gen_module_list_script = "//build/ohos/testfwk/gen_module_list_files.py"
+ _arguments = [
+ "--target",
+ target_name,
+ "--target_label",
+ get_label_info(":$target_name", "label_with_toolchain"),
+ "--source_dir",
+ rebase_path(get_label_info(":$target_name", "dir"), root_build_dir),
+ "--test_type",
+ invoker.test_type,
+ ]
+
+ _test_output_dir =
+ "$root_out_dir/tests/${invoker.test_type}/${invoker.module_out_path}"
+ _module_list_file = "$root_out_dir/module_list_files/${invoker.module_out_path}/$target_name.mlf"
+
+ _arguments += [
+ "--output_dir",
+ rebase_path(_test_output_dir, root_build_dir),
+ "--module_list_file",
+ rebase_path(_module_list_file, root_build_dir),
+ ]
+ exec_script(_gen_module_list_script, _arguments)
+
+ ohos_hap(target_name) {
+ forward_variables_from(invoker,
+ "*",
+ [
+ "test_type",
+ "module_out_path",
+ "visibility",
+ ])
+ forward_variables_from(invoker, [ "visibility" ])
+
+ hap_path = "$_test_output_dir/$target_name.hap"
+ testonly = true
+ }
+}
+
+template("ohos_js_unittest") {
+ ohos_js_test(target_name) {
+ forward_variables_from(invoker, "*")
+ test_type = "unittest"
+ }
+}
diff --git a/dsoftbus/build/toolchain/BUILD.gn b/dsoftbus/build/toolchain/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..3eeb981128a969584a7ce1c9207c91eabb985822
--- /dev/null
+++ b/dsoftbus/build/toolchain/BUILD.gn
@@ -0,0 +1,24 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/concurrent_links.gni")
+
+declare_args() {
+ # Pool for non goma tasks.
+ action_pool_depth = -1
+}
+
+if (action_pool_depth == -1) {
+ action_pool_depth = exec_script("get_cpu_count.py", [], "value")
+}
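+
+# Because action_pool_depth is a declared arg, it can also be pinned in
+# args.gn (e.g. action_pool_depth = 8) instead of being derived from the
+# host CPU count.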
+
+if (current_toolchain == default_toolchain) {
+ pool("link_pool") {
+ depth = concurrent_links
+ }
+
+ pool("action_pool") {
+ depth = action_pool_depth
+ }
+}
diff --git a/dsoftbus/build/toolchain/cc_wrapper.gni b/dsoftbus/build/toolchain/cc_wrapper.gni
new file mode 100755
index 0000000000000000000000000000000000000000..f6ce4ba88a8f1af032ce38d5de4bc96fc3536310
--- /dev/null
+++ b/dsoftbus/build/toolchain/cc_wrapper.gni
@@ -0,0 +1,42 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines the configuration of cc wrapper
+# ccache: a c/c++ compiler cache which can greatly reduce recompilation times.
+# icecc, distcc: it takes compile jobs from a build and distributes them among
+# remote machines allowing a parallel build.
+#
+# TIPS
+#
+# 1) ccache
+# Set clang_use_chrome_plugins=false if using ccache 3.1.9 or earlier, since
+# these versions don't support -Xclang. (3.1.10 and later will silently
+# ignore -Xclang, so it doesn't matter if you disable clang_use_chrome_plugins
+# or not).
+#
+# Use ccache 3.2 or later to avoid clang unused argument warnings:
+# https://bugzilla.samba.org/show_bug.cgi?id=8118
+#
+# To avoid -Wparentheses-equality clang warnings, at some cost in terms of
+# speed, you can do:
+# export CCACHE_CPP2=yes
+#
+# 2) icecc
+# Set clang_use_chrome_plugins=false because icecc cannot distribute custom
+# clang libraries.
+#
+# To use icecc and ccache together, set cc_wrapper = "ccache" with
+# export CCACHE_PREFIX=icecc
+
+_cc_wrapper = ""
+_ccache_exec = getenv("CCACHE_EXEC")
+_use_ccache = getenv("USE_CCACHE")
+if (_use_ccache == "1" && _ccache_exec != "") {
+ _cc_wrapper = rebase_path(_ccache_exec)
+}
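+
+# For example, to route compiles through ccache (assuming it is installed):
+#   export USE_CCACHE=1
+#   export CCACHE_EXEC=$(command -v ccache)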
+
+declare_args() {
+ # Set to "ccache", "icecc" or "distcc". Probably doesn't work on windows.
+ cc_wrapper = _cc_wrapper
+}
diff --git a/dsoftbus/build/toolchain/clang_static_analyzer.gni b/dsoftbus/build/toolchain/clang_static_analyzer.gni
new file mode 100755
index 0000000000000000000000000000000000000000..9cb797541c9d671df6987aa821a49747cd2c0165
--- /dev/null
+++ b/dsoftbus/build/toolchain/clang_static_analyzer.gni
@@ -0,0 +1,11 @@
+# Copyright (c) 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines the configuration of Clang static analysis tools.
+# See docs/clang_static_analyzer.md for more information.
+
+declare_args() {
+ # Uses the Clang static analysis tools during compilation.
+ use_clang_static_analyzer = false
+}
diff --git a/dsoftbus/build/toolchain/clang_static_analyzer_wrapper.py b/dsoftbus/build/toolchain/clang_static_analyzer_wrapper.py
new file mode 100755
index 0000000000000000000000000000000000000000..49da59f7cd28445eca00eff390fabb769de35b33
--- /dev/null
+++ b/dsoftbus/build/toolchain/clang_static_analyzer_wrapper.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Adds an analysis build step to invocations of the Clang C/C++ compiler.
+
+Usage: clang_static_analyzer_wrapper.py [args...]
+"""
+
+import argparse
+import sys
+import wrapper_utils
+
+# Flags used to enable analysis for Clang invocations.
+analyzer_enable_flags = [
+ '--analyze',
+]
+
+# Flags used to configure the analyzer's behavior.
+analyzer_option_flags = [
+ '-fdiagnostics-show-option',
+ '-analyzer-checker=cplusplus',
+ '-analyzer-opt-analyze-nested-blocks',
+ '-analyzer-eagerly-assume',
+ '-analyzer-output=text',
+ '-analyzer-config',
+ 'suppress-c++-stdlib=true',
+
+ # List of checkers to execute.
+ # The full list of checkers can be found at
+ # https://clang-analyzer.llvm.org/available_checks.html.
+ '-analyzer-checker=core',
+ '-analyzer-checker=unix',
+ '-analyzer-checker=deadcode',
+]
+
+
+# Prepends every element of a list |args| with |token|.
+# e.g. ['-analyzer-foo', '-analyzer-bar'] => ['-Xanalyzer', '-analyzer-foo',
+# '-Xanalyzer', '-analyzer-bar']
+def interleave_args(args, token):
+ return list(sum(zip([token] * len(args), args), ()))
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--mode',
+ choices=['clang', 'cl'],
+ required=True,
+ help='Specifies the compiler argument convention '
+ 'to use.')
+ parser.add_argument('args', nargs=argparse.REMAINDER)
+ parsed_args = parser.parse_args()
+
+ prefix = '-Xclang' if parsed_args.mode == 'cl' else '-Xanalyzer'
+ cmd = parsed_args.args + analyzer_enable_flags + interleave_args(
+ analyzer_option_flags, prefix)
+    # Run the analysis build; its exit status is deliberately ignored so
+    # that analyzer findings do not fail the build.
+    _, stderr = wrapper_utils.capture_command_stderr(
+        wrapper_utils.command_to_run(cmd))
+    sys.stderr.write(stderr)
+
+    # Run the actual compilation; its exit status is the one returned.
+    return_code, stderr = wrapper_utils.capture_command_stderr(
+        wrapper_utils.command_to_run(parsed_args.args))
+    sys.stderr.write(stderr)
+
+ return return_code
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/toolchain/concurrent_links.gni b/dsoftbus/build/toolchain/concurrent_links.gni
new file mode 100755
index 0000000000000000000000000000000000000000..9de0af1a74027a05547459bcfc35dd900467d99d
--- /dev/null
+++ b/dsoftbus/build/toolchain/concurrent_links.gni
@@ -0,0 +1,58 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file should only be imported from files that define toolchains.
+# There's no way to enforce this exactly, but all toolchains are processed
+# in the context of the default_toolchain, so we can at least check for that.
+assert(current_toolchain == default_toolchain)
+
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/toolchain/toolchain.gni")
+
+declare_args() {
+ # Limit the number of concurrent links; we often want to run fewer
+ # links at once than we do compiles, because linking is memory-intensive.
+ # The default to use varies by platform and by the amount of memory
+ # available, so we call out to a script to get the right value.
+ concurrent_links = -1
+}
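+
+# Like any declared arg, concurrent_links can be pinned in args.gn
+# (e.g. concurrent_links = 4) to bypass the heuristics below.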
+
+if (concurrent_links == -1) {
+ if (use_thin_lto) {
+ _args = [
+ "--mem_per_link_gb=10",
+ "--reserve_mem_gb=10",
+ ]
+ } else if (use_sanitizer_coverage || use_fuzzing_engine) {
+ # Sanitizer coverage instrumentation increases linker memory consumption
+ # significantly.
+ _args = [ "--mem_per_link_gb=16" ]
+ } else if (is_win && symbol_level == 1 && !is_debug) {
+ _args = [ "--mem_per_link_gb=3" ]
+ } else if (is_win) {
+ _args = [ "--mem_per_link_gb=5" ]
+ } else if (is_mac) {
+ _args = [ "--mem_per_link_gb=4" ]
+ } else if (is_ohos && !is_component_build && symbol_level == 2) {
+ # Full debug symbols require large memory for link.
+ _args = [ "--mem_per_link_gb=25" ]
+ } else if (is_ohos && !is_debug && !using_sanitizer && symbol_level < 2) {
+    # Increase the number of concurrent links for release bots. Debug builds
+    # make heavier use of ProGuard, so the limit is not raised for them.
+    # Sanitizers also increase the memory overhead.
+ if (symbol_level == 1) {
+ _args = [ "--mem_per_link_gb=6" ]
+ } else {
+ _args = [ "--mem_per_link_gb=4" ]
+ }
+ } else if (is_linux && !is_chromeos && symbol_level == 0) {
+ # Memory consumption on link without debug symbols is low on linux.
+ _args = [ "--mem_per_link_gb=3" ]
+ } else {
+ _args = []
+ }
+
+ concurrent_links = exec_script("get_concurrent_links.py", _args, "value")
+}
diff --git a/dsoftbus/build/toolchain/gcc_link_wrapper.py b/dsoftbus/build/toolchain/gcc_link_wrapper.py
new file mode 100755
index 0000000000000000000000000000000000000000..58b3368ccfa93d1b2aab49b4f0a9595f0930e766
--- /dev/null
+++ b/dsoftbus/build/toolchain/gcc_link_wrapper.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs a linking command and optionally a strip command.
+
+This script exists to avoid using complex shell commands in
+gcc_toolchain.gni's tool("link"), in case the host running the compiler
+does not have a POSIX-like shell (e.g. Windows).
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+
+import wrapper_utils
+
+
+# When running on a Windows host and using a toolchain whose tools are
+# actually wrapper scripts (i.e. .bat files on Windows) rather than binary
+# executables, the "command" to run has to be prefixed with this magic.
+# The GN toolchain definitions take care of that for when GN/Ninja is
+# running the tool directly. When that command is passed in to this
+# script, it appears as a unitary string but needs to be split up so that
+# just 'cmd' is the actual command given to Python's subprocess module.
+BAT_PREFIX = 'cmd /c call '
+
+
+def command_to_run(command):
+ if command[0].startswith(BAT_PREFIX):
+ command = command[0].split(None, 3) + command[1:]
+ return command
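+
+# For example (hypothetical .bat toolchain):
+#   command_to_run(['cmd /c call gcc.bat', '-o', 'a.out'])
+#   returns ['cmd', '/c', 'call', 'gcc.bat', '-o', 'a.out'].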
+
+
+def is_static_link(command):
+    return "-static" in command
+
+
+""" since static link and dynamic link have different CRT files on ohos,
+and we use dynamic link CRT files as default, so when link statically,
+we need change the CRT files
+"""
+
+
+def update_crt(command):
+ for item in command:
+ if str(item).find("crtbegin_dynamic.o") >= 0:
+ index = command.index(item)
+ new_crtbegin = str(item).replace("crtbegin_dynamic.o",
+ "crtbegin_static.o")
+ command[index] = new_crtbegin
+ return command
+
+
+def main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument('--strip',
+ help='The strip binary to run',
+ metavar='PATH')
+ parser.add_argument('--unstripped-file',
+ help='Executable file produced by linking command',
+ metavar='FILE')
+ parser.add_argument('--map-file',
+                        help=('Use -Wl,-Map to generate a map file. Will be '
+                              'gzipped if the extension ends with .gz'),
+ metavar='FILE')
+ parser.add_argument('--output',
+ required=True,
+ help='Final output executable file',
+ metavar='FILE')
+    parser.add_argument('--clang_rt_dso_path',
+                        help='Clang asan runtime shared library')
+ parser.add_argument('command', nargs='+',
+ help='Linking command')
+ args = parser.parse_args()
+
+ # Work-around for gold being slow-by-default. http://crbug.com/632230
+ fast_env = dict(os.environ)
+ fast_env['LC_ALL'] = 'C'
+    if is_static_link(args.command):
+        command = update_crt(args.command)
+        if args.clang_rt_dso_path is not None:
+            # The asan runtime DSO cannot be linked statically, so skip the
+            # link entirely in this configuration.
+            return 0
+    else:
+        command = args.command
+    result = wrapper_utils.run_link_with_optional_map_file(
+        command, env=fast_env, map_file=args.map_file)
+ if result != 0:
+ return result
+
+ # Finally, strip the linked executable (if desired).
+ if args.strip:
+ result = subprocess.call(command_to_run([args.strip, '-o', args.output,
+ args.unstripped_file]))
+
+ return result
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/dsoftbus/build/toolchain/gcc_solink_wrapper.py b/dsoftbus/build/toolchain/gcc_solink_wrapper.py
new file mode 100755
index 0000000000000000000000000000000000000000..7bf117981498d883fd18038c4d43636a54f8e521
--- /dev/null
+++ b/dsoftbus/build/toolchain/gcc_solink_wrapper.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Runs 'ld -shared' and generates a .TOC file that's untouched when unchanged.
+
+This script exists to avoid using complex shell commands in
+gcc_toolchain.gni's tool("solink"), in case the host running the compiler
+does not have a POSIX-like shell (e.g. Windows).
+"""
+
+import argparse
+import os
+import subprocess
+import sys
+import shutil
+
+import wrapper_utils
+
+
+def collect_soname(args):
+ """Replaces: readelf -d $sofile | grep SONAME"""
+ toc = ''
+ readelf = subprocess.Popen(wrapper_utils.command_to_run(
+ [args.readelf, '-d', args.sofile]),
+ stdout=subprocess.PIPE,
+ bufsize=-1)
+ for line in readelf.stdout:
+ if b'SONAME' in line:
+ toc += line.decode()
+ return readelf.wait(), toc
+
+
+def collect_dyn_sym(args):
+ """Replaces: nm --format=posix -g -D $sofile | cut -f1-2 -d' '"""
+ toc = ''
+ _command = [args.nm]
+ if args.sofile.endswith('.dll'):
+ _command.append('--extern-only')
+ else:
+ _command.extend(['--format=posix', '-g', '-D'])
+ _command.append(args.sofile)
+ nm = subprocess.Popen(wrapper_utils.command_to_run(_command),
+ stdout=subprocess.PIPE,
+ bufsize=-1)
+ for line in nm.stdout:
+ toc += '{}\n'.format(' '.join(line.decode().split(' ', 2)[:2]))
+ return nm.wait(), toc
+
+
+def collect_toc(args):
+ result, toc = collect_soname(args)
+ if result == 0:
+ result, dynsym = collect_dyn_sym(args)
+ toc += dynsym
+ return result, toc
+
+
+def update_toc(tocfile, toc):
+ if os.path.exists(tocfile):
+ with open(tocfile, 'r') as f:
+ old_toc = f.read()
+ else:
+ old_toc = None
+ if toc != old_toc:
+ with open(tocfile, 'w') as fp:
+ fp.write(toc)
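+
+# A TOC is the SONAME line from readelf followed by the exported dynamic
+# symbols from nm, e.g. (illustrative):
+#   0x000000000000000e (SONAME) Library soname: [libfoo.z.so]
+#   foo_init T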
+
+
+def reformat_rsp_file(rspfile):
+ """ Move all implibs from --whole-archive section"""
+ with open(rspfile, "r") as fi:
+ rspcontent = fi.read()
+ result = []
+ implibs = []
+ naflag = False
+ for arg in rspcontent.split(" "):
+ if naflag and arg.endswith(".lib"):
+ implibs.append(arg)
+ continue
+ result.append(arg)
+ if arg == "-Wl,--whole-archive":
+ naflag = True
+ continue
+ if arg == "-Wl,--no-whole-archive":
+ naflag = False
+ result.extend(implibs)
+
+ with open(rspfile, "w") as fo:
+ fo.write(" ".join(result))
+
+
+def main():
+ parser = argparse.ArgumentParser(description=__doc__)
+ parser.add_argument('--readelf',
+ required=True,
+ help='The readelf binary to run',
+ metavar='PATH')
+ parser.add_argument('--nm',
+ required=True,
+ help='The nm binary to run',
+ metavar='PATH')
+ parser.add_argument('--strip',
+ help='The strip binary to run',
+ metavar='PATH')
+ parser.add_argument('--sofile',
+ required=True,
+ help='Shared object file produced by linking command',
+ metavar='FILE')
+ parser.add_argument('--tocfile',
+ required=False,
+ help='Output table-of-contents file',
+ metavar='FILE')
+ parser.add_argument('--map-file',
+                        help=('Use -Wl,-Map to generate a map file. Will be '
+                              'gzipped if the extension ends with .gz'),
+ metavar='FILE')
+ parser.add_argument('--output',
+ required=True,
+ help='Final output shared object file',
+ metavar='FILE')
+ parser.add_argument('--libfile', required=False, metavar='FILE')
+ parser.add_argument('command', nargs='+', help='Linking command')
+ args = parser.parse_args()
+
+ if args.sofile.endswith(".dll"):
+ rspfile = None
+ for a in args.command:
+ if a[0] == "@":
+ rspfile = a[1:]
+ break
+ if rspfile:
+ reformat_rsp_file(rspfile)
+ # Work-around for gold being slow-by-default. http://crbug.com/632230
+ fast_env = dict(os.environ)
+ fast_env['LC_ALL'] = 'C'
+
+ # First, run the actual link.
+ command = wrapper_utils.command_to_run(args.command)
+ result = wrapper_utils.run_link_with_optional_map_file(
+ command, env=fast_env, map_file=args.map_file)
+
+ if result != 0:
+ return result
+
+ # Next, generate the contents of the TOC file.
+ result, toc = collect_toc(args)
+ if result != 0:
+ return result
+
+ # If there is an existing TOC file with identical contents, leave it alone.
+ # Otherwise, write out the TOC file.
+ if args.tocfile:
+ update_toc(args.tocfile, toc)
+
+ # Finally, strip the linked shared object file (if desired).
+ if args.strip:
+ result = subprocess.call(
+ wrapper_utils.command_to_run(
+ [args.strip, '-o', args.output, args.sofile]))
+ if args.libfile:
+ libfile_name = os.path.basename(args.libfile)
+ sofile_output_dir = os.path.dirname(args.sofile)
+ unstripped_libfile = os.path.join(sofile_output_dir, libfile_name)
+ shutil.copy2(unstripped_libfile, args.libfile)
+
+ return result
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/dsoftbus/build/toolchain/gcc_toolchain.gni b/dsoftbus/build/toolchain/gcc_toolchain.gni
new file mode 100755
index 0000000000000000000000000000000000000000..2707deaed7bcd4b0ee1b3075e3e41b1ad1bfd81f
--- /dev/null
+++ b/dsoftbus/build/toolchain/gcc_toolchain.gni
@@ -0,0 +1,598 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/compiler/compiler.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+import("//build/config/v8_target_cpu.gni")
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/clang_static_analyzer.gni")
+import("//build/toolchain/toolchain.gni")
+
+if (is_nacl) {
+ # To keep NaCl variables out of builds that don't include NaCl, all
+ # variables defined in nacl/config.gni referenced here should be protected by
+ # is_nacl conditions.
+ import("//build/config/nacl/config.gni")
+}
+
+declare_args() {
+ # Enables allowlist generation for IDR_ grit defines seen by the compiler.
+  # Currently it works only on ohos and is enabled by default for release
+  # builds. It requires debug info, so it is disabled for symbol_level=0 and
+  # strip_debug_info=true.
+ enable_resource_allowlist_generation =
+ is_ohos && !is_debug &&
+ # Always enable for official builds, but enable for release builds by
+ # default only when other args allow.
+ (is_official_build ||
+ (!strip_debug_info && symbol_level > 0 && !is_component_build))
+}
+
+# When the arg is set via args.gn, it applies to all toolchains. In order to not
+# hit the assert in grit_rule.gni, explicitly disable for host toolchains.
+if (is_linux && target_os == "ohos") {
+ enable_resource_allowlist_generation = false
+}
+
+# Path to the Clang static analysis wrapper script.
+# REVIEWERS: can you suggest a better location for this?
+# GN is really picky about dead stores of variables except at the global scope.
+analyzer_wrapper =
+ rebase_path("//build/toolchain/clang_static_analyzer_wrapper.py",
+ root_build_dir) + " --mode=clang"
+
+# This template defines a toolchain for something that works like gcc
+# (including clang).
+#
+# It requires the following variables specifying the executables to run:
+# - ar
+# - cc
+# - cxx
+# - ld
+#
+# Optional parameters that control the tools:
+#
+# - extra_cflags
+# Extra flags to be appended when compiling C files (but not C++ files).
+# - extra_cppflags
+# Extra flags to be appended when compiling both C and C++ files. "CPP"
+# stands for "C PreProcessor" in this context, although it can be
+# used for non-preprocessor flags as well. Not to be confused with
+# "CXX" (which follows).
+# - extra_cxxflags
+# Extra flags to be appended when compiling C++ files (but not C files).
+# - extra_asmflags
+# Extra flags to be appended when compiling assembly.
+# - extra_ldflags
+# Extra flags to be appended when linking
+#
+# - libs_section_prefix
+# - libs_section_postfix
+# The contents of these strings, if specified, will be placed around
+# the libs section of the linker line. It allows one to inject libraries
+# at the beginning and end for all targets in a toolchain.
+# - solink_libs_section_prefix
+# - solink_libs_section_postfix
+# Same as libs_section_{pre,post}fix except used for solink instead of link.
+# - link_outputs
+# The content of this array, if specified, will be added to the list of
+# outputs from the link command. This can be useful in conjunction with
+# the post_link parameter.
+# - use_unstripped_as_runtime_outputs
+# When |strip| is set, mark unstripped executables as runtime deps rather
+# than stripped ones.
+# - post_link
+# The content of this string, if specified, will be run as a separate
+# command following the link command.
+# - deps
+# Just forwarded to the toolchain definition.
+# - executable_extension
+# If this string is specified it will be used for the file extension
+# for an executable, rather than using no extension; targets will
+# still be able to override the extension using the output_extension
+# variable.
+# - rebuild_define
+# The contents of this string, if specified, will be passed as a #define
+# to the toolchain. It can be used to force recompiles whenever a
+# toolchain is updated.
+# - shlib_extension
+# If this string is specified it will be used for the file extension
+# for a shared library, rather than default value specified in
+# toolchain.gni
+# - strip
+# Location of the strip executable. When specified, strip will be run on
+# all shared libraries and executables as they are built. The pre-stripped
+# artifacts will be put in lib.unstripped/ and exe.unstripped/.
+template("gcc_toolchain") {
+ toolchain(target_name) {
+ assert(defined(invoker.ar), "gcc_toolchain() must specify a \"ar\" value")
+ assert(defined(invoker.cc), "gcc_toolchain() must specify a \"cc\" value")
+ assert(defined(invoker.cxx), "gcc_toolchain() must specify a \"cxx\" value")
+ assert(defined(invoker.ld), "gcc_toolchain() must specify a \"ld\" value")
+
+ # This define changes when the toolchain changes, forcing a rebuild.
+ # Nothing should ever use this define.
+ if (defined(invoker.rebuild_define)) {
+ rebuild_string = "-D" + invoker.rebuild_define + " "
+ } else {
+ rebuild_string = ""
+ }
+
+ # GN's syntax can't handle more than one scope dereference at once, like
+ # "invoker.toolchain_args.foo", so make a temporary to hold the toolchain
+ # args so we can do "invoker_toolchain_args.foo".
+ assert(defined(invoker.toolchain_args),
+ "Toolchains must specify toolchain_args")
+ invoker_toolchain_args = invoker.toolchain_args
+ assert(defined(invoker_toolchain_args.current_cpu),
+ "toolchain_args must specify a current_cpu")
+ assert(defined(invoker_toolchain_args.current_os),
+ "toolchain_args must specify a current_os")
+
+ # When invoking this toolchain not as the default one, these args will be
+ # passed to the build. They are ignored when this is the default toolchain.
+ toolchain_args = {
+ # Populate toolchain args from the invoker.
+ forward_variables_from(invoker_toolchain_args, "*")
+
+ # The host toolchain value computed by the default toolchain's setup
+ # needs to be passed through unchanged to all secondary toolchains to
+ # ensure that it's always the same, regardless of the values that may be
+ # set on those toolchains.
+ host_toolchain = host_toolchain
+
+ if (!defined(invoker_toolchain_args.v8_current_cpu)) {
+ v8_current_cpu = invoker_toolchain_args.current_cpu
+ }
+ }
+
+ if (defined(toolchain_args.cc_wrapper)) {
+ toolchain_cc_wrapper = toolchain_args.cc_wrapper
+ } else {
+ toolchain_cc_wrapper = cc_wrapper
+ }
+
+ if (is_clang && use_clang_static_analyzer &&
+ (!defined(invoker.is_clang_analysis_supported) ||
+ invoker.is_clang_analysis_supported)) {
+ compiler_prefix = "${analyzer_wrapper} "
+ asm = invoker.cc
+ } else {
+ compiler_prefix = "${toolchain_cc_wrapper} "
+ }
+
+ cc = compiler_prefix + invoker.cc
+ cxx = compiler_prefix + invoker.cxx
+ ar = invoker.ar
+ ld = invoker.ld
+ if (!defined(asm)) {
+ asm = cc
+ }
+ if (defined(invoker.readelf)) {
+ readelf = invoker.readelf
+ } else {
+ readelf = "readelf"
+ }
+ if (defined(invoker.nm)) {
+ nm = invoker.nm
+ } else {
+ nm = "nm"
+ }
+
+ if (defined(invoker.shlib_extension)) {
+ default_shlib_extension = invoker.shlib_extension
+ } else {
+ default_shlib_extension = shlib_extension
+ }
+
+ if (defined(invoker.executable_extension)) {
+ default_executable_extension = invoker.executable_extension
+ } else {
+ default_executable_extension = ""
+ }
+
+ # Bring these into our scope for string interpolation with default values.
+ if (defined(invoker.libs_section_prefix)) {
+ libs_section_prefix = invoker.libs_section_prefix
+ } else {
+ libs_section_prefix = ""
+ }
+
+ if (defined(invoker.libs_section_postfix)) {
+ libs_section_postfix = invoker.libs_section_postfix
+ } else {
+ libs_section_postfix = ""
+ }
+
+ if (defined(invoker.solink_libs_section_prefix)) {
+ solink_libs_section_prefix = invoker.solink_libs_section_prefix
+ } else {
+ solink_libs_section_prefix = ""
+ }
+
+ if (defined(invoker.solink_libs_section_postfix)) {
+ solink_libs_section_postfix = invoker.solink_libs_section_postfix
+ } else {
+ solink_libs_section_postfix = ""
+ }
+
+ if (defined(invoker.extra_cflags) && invoker.extra_cflags != "") {
+ extra_cflags = " " + invoker.extra_cflags
+ } else {
+ extra_cflags = ""
+ }
+
+ if (defined(invoker.extra_cppflags) && invoker.extra_cppflags != "") {
+ extra_cppflags = " " + invoker.extra_cppflags
+ } else {
+ extra_cppflags = ""
+ }
+
+ if (defined(invoker.extra_cxxflags) && invoker.extra_cxxflags != "") {
+ extra_cxxflags = " " + invoker.extra_cxxflags
+ } else {
+ extra_cxxflags = ""
+ }
+
+ if (defined(invoker.extra_asmflags) && invoker.extra_asmflags != "") {
+ extra_asmflags = " " + invoker.extra_asmflags
+ } else {
+ extra_asmflags = ""
+ }
+
+ if (defined(invoker.extra_ldflags) && invoker.extra_ldflags != "") {
+ extra_ldflags = " " + invoker.extra_ldflags
+ } else {
+ extra_ldflags = ""
+ }
+
+ enable_linker_map = defined(invoker.enable_linker_map) &&
+ invoker.enable_linker_map && generate_linker_map
+
+ # These library switches can apply to all tools below.
+ lib_switch = "-l"
+ lib_dir_switch = "-L"
+
+ # Object files go in this directory.
+ object_subdir = "{{source_out_dir}}/{{label_name}}"
+
+ tool("cc") {
+ depfile = "{{output}}.d"
+ command = "$cc -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}${extra_cppflags}${extra_cflags} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "CC {{output}}"
+ outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+ }
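+
+    # With typical values substituted, the expanded command looks roughly
+    # like (illustrative):
+    #   clang -MMD -MF obj/foo/foo.o.d -DFOO -I../../include -O2 \
+    #     -c ../../foo.c -o obj/foo/foo.o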
+
+ tool("cxx") {
+ depfile = "{{output}}.d"
+ command = "$cxx -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}${extra_cppflags}${extra_cxxflags} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "CXX {{output}}"
+ outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+ }
+
+ tool("asm") {
+ # For GCC we can just use the C compiler to compile assembly.
+ depfile = "{{output}}.d"
+ command = "$asm -MMD -MF $depfile ${rebuild_string}{{defines}} {{include_dirs}} {{asmflags}}${extra_asmflags} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "ASM {{output}}"
+ outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+ }
+
+ tool("alink") {
+ if (current_os == "aix") {
+ # AIX does not support either -D (deterministic output) or response
+ # files.
+ command = "$ar -X64 {{arflags}} -r -c -s {{output}} {{inputs}}"
+ } else {
+ rspfile = "{{output}}.rsp"
+ rspfile_content = "{{inputs}}"
+ command = "\"$ar\" {{arflags}} -r -c -s -D {{output}} @\"$rspfile\""
+ }
+
+ # Remove the output file first so that ar doesn't try to modify the
+ # existing file.
+ if (host_os == "win") {
+ tool_wrapper_path =
+ rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir)
+ command = "cmd /c $python_path $tool_wrapper_path delete-file {{output}} && $command"
+ } else {
+ command = "rm -f {{output}} && $command"
+ }
+
+ # Almost all targets build with //build/config/compiler:thin_archive which
+ # adds -T to arflags.
+ description = "AR {{output}}"
+ outputs = [ "{{output_dir}}/{{target_output_name}}{{output_extension}}" ]
+
+ # Shared libraries go in the target out directory by default so we can
+ # generate different targets with the same name and not have them collide.
+ default_output_dir = "{{target_out_dir}}"
+ default_output_extension = ".a"
+ output_prefix = "lib"
+ }
+
+ tool("solink") {
+ soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so".
+ sofile = "{{output_dir}}/$soname" # Possibly including toolchain dir.
+ rspfile = sofile + ".rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ is_mingw_link = false
+ if (invoker_toolchain_args.current_os == "mingw") {
+ is_mingw_link = true
+ libname = "{{target_output_name}}.lib"
+ libfile = "{{output_dir}}/$libname"
+ }
+
+ if (defined(invoker.strip)) {
+ unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$sofile"
+ } else {
+ unstripped_sofile = sofile
+ }
+
+ link_command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" @\"$rspfile\""
+ if (!is_mingw_link) {
+ link_command = "$link_command -Wl,-soname=\"$soname\""
+ } else {
+ link_command = "$link_command -Wl,--out-implib,{{root_out_dir}}/lib.unstripped/$libfile"
+ }
+
+ # Generate a map file to be used for binary size analysis.
+ # Map file adds ~10% to the link time on a z620.
+ map_switch = ""
+ if (enable_linker_map && is_official_build) {
+ map_file = "$unstripped_sofile.map.gz"
+ map_switch = " --map-file \"$map_file\""
+ }
+
+ assert(defined(readelf), "to solink you must have a readelf")
+ assert(defined(nm), "to solink you must have an nm")
+ strip_switch = ""
+ if (defined(invoker.strip)) {
+ strip_switch = "--strip=${invoker.strip} "
+ }
+
+ # This needs a Python script to avoid using a complex shell command
+ # requiring sh control structures, pipelines, and POSIX utilities.
+ # The host might not have a POSIX shell and utilities (e.g. Windows).
+ solink_wrapper =
+ rebase_path("//build/toolchain/gcc_solink_wrapper.py", root_build_dir)
+ command = "$python_path \"$solink_wrapper\" --readelf=\"$readelf\" --nm=\"$nm\" $strip_switch --sofile=\"$unstripped_sofile\" $map_switch --output=\"$sofile\""
+ if (is_mingw_link) {
+ command = "$command --libfile=\"$libfile\""
+ }
+ command = "$command -- $link_command"
+
+ rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
+
+ description = "SOLINK $sofile"
+
+ # Use this for {{output_extension}} expansions unless a target manually
+ # overrides it (in which case {{output_extension}} will be what the target
+ # specifies).
+ default_output_extension = default_shlib_extension
+
+ default_output_dir = "{{root_out_dir}}"
+
+ output_prefix = "lib"
+
+      # Since the above command only updates the .TOC file when it changes,
+      # ask Ninja to check whether the timestamp actually changed, so it
+      # knows whether downstream dependencies need to be rebuilt.
+      restat = true
+
+      # Tell GN about the output files. Downstream targets link against the
+      # sofile.
+ outputs = [ sofile ]
+ if (sofile != unstripped_sofile) {
+ outputs += [ unstripped_sofile ]
+ if (defined(invoker.use_unstripped_as_runtime_outputs) &&
+ invoker.use_unstripped_as_runtime_outputs) {
+ runtime_outputs = [ unstripped_sofile ]
+ }
+ }
+ if (defined(map_file)) {
+ outputs += [ map_file ]
+ }
+
+ if (is_mingw_link) {
+ outputs += [ libfile ]
+ link_output = libfile
+ depend_output = libfile
+ } else {
+ link_output = sofile
+ depend_output = sofile
+ }
+ }
+
+ tool("solink_module") {
+ soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so".
+ sofile = "{{output_dir}}/$soname"
+ rspfile = sofile + ".rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ if (defined(invoker.strip)) {
+ unstripped_sofile = "{{root_out_dir}}/lib.unstripped/$sofile"
+ } else {
+ unstripped_sofile = sofile
+ }
+
+ command = "$ld -shared {{ldflags}}${extra_ldflags} -o \"$unstripped_sofile\" -Wl,-soname=\"$soname\" @\"$rspfile\""
+
+ if (defined(invoker.strip)) {
+ strip_command = "${invoker.strip} -o \"$sofile\" \"$unstripped_sofile\""
+ command += " && " + strip_command
+ }
+ rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
+
+ description = "SOLINK_MODULE $sofile"
+
+ # Use this for {{output_extension}} expansions unless a target manually
+ # overrides it (in which case {{output_extension}} will be what the target
+ # specifies).
+ if (defined(invoker.loadable_module_extension)) {
+ default_output_extension = invoker.loadable_module_extension
+ } else {
+ default_output_extension = default_shlib_extension
+ }
+
+ default_output_dir = "{{root_out_dir}}"
+
+ output_prefix = "lib"
+
+ outputs = [ sofile ]
+ if (sofile != unstripped_sofile) {
+ outputs += [ unstripped_sofile ]
+ if (defined(invoker.use_unstripped_as_runtime_outputs) &&
+ invoker.use_unstripped_as_runtime_outputs) {
+ runtime_outputs = [ unstripped_sofile ]
+ }
+ }
+ }
+
+ tool("link") {
+ exename = "{{target_output_name}}{{output_extension}}"
+ outfile = "{{output_dir}}/$exename"
+ rspfile = "$outfile.rsp"
+ unstripped_outfile = outfile
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ # Use this for {{output_extension}} expansions unless a target manually
+ # overrides it (in which case {{output_extension}} will be what the target
+ # specifies).
+ default_output_extension = default_executable_extension
+
+ default_output_dir = "{{root_out_dir}}"
+
+ if (defined(invoker.strip)) {
+ unstripped_outfile = "{{root_out_dir}}/exe.unstripped/$outfile"
+ }
+
+ # Generate a map file to be used for binary size analysis.
+ # Map file adds ~10% to the link time on a z620.
+ map_switch = ""
+ if (enable_linker_map && is_official_build) {
+ map_file = "$unstripped_outfile.map.gz"
+ map_switch = " --map-file \"$map_file\""
+ }
+
+ start_group_flag = ""
+ end_group_flag = ""
+ if (current_os != "aix") {
+ # the "--start-group .. --end-group" feature isn't available on the aix ld.
+ start_group_flag = "-Wl,--start-group"
+ end_group_flag = "-Wl,--end-group "
+ }
+ _clang_rt_dso_full_path = ""
+ if (is_asan && invoker_toolchain_args.current_os == "ohos") {
+ if (invoker_toolchain_args.current_cpu == "arm64") {
+ _clang_rt_dso_full_path = rebase_path(
+ "$clang_base_path/lib/clang/$clang_version/lib/aarch64-linux-ohosmusl/libclang_rt.asan.so",
+ root_build_dir)
+ } else {
+ _clang_rt_dso_full_path = rebase_path(
+ "$clang_base_path/lib/clang/$clang_version/lib/arm-linux-ohosmusl/libclang_rt.asan.so",
+ root_build_dir)
+ }
+ }
+ link_command = "$ld {{ldflags}}${extra_ldflags} -o \"$unstripped_outfile\" $libs_section_prefix $start_group_flag $_clang_rt_dso_full_path @\"$rspfile\" {{solibs}} {{libs}} $end_group_flag $libs_section_postfix"
+
+ strip_switch = ""
+
+ if (defined(invoker.strip)) {
+ strip_switch = " --strip=\"${invoker.strip}\" --unstripped-file=\"$unstripped_outfile\""
+ }
+ if (is_asan && invoker_toolchain_args.current_os == "ohos") {
+ strip_switch =
+ "$strip_switch --clang_rt_dso_path=\"$_clang_rt_dso_full_path\""
+ }
+
+ link_wrapper =
+ rebase_path("//build/toolchain/gcc_link_wrapper.py", root_build_dir)
+ command = "$python_path \"$link_wrapper\" --output=\"$outfile\"$strip_switch$map_switch -- $link_command"
+ description = "LINK $outfile"
+ rspfile_content = "{{inputs}}"
+ outputs = [ outfile ]
+ if (outfile != unstripped_outfile) {
+ outputs += [ unstripped_outfile ]
+ if (defined(invoker.use_unstripped_as_runtime_outputs) &&
+ invoker.use_unstripped_as_runtime_outputs) {
+ runtime_outputs = [ unstripped_outfile ]
+ }
+ }
+ if (defined(invoker.link_outputs)) {
+ outputs += invoker.link_outputs
+ }
+ if (defined(map_file)) {
+ outputs += [ map_file ]
+ }
+ }
+
+ # These two are really entirely generic, but have to be repeated in
+ # each toolchain because GN doesn't allow a template to be used here.
+ # See //build/toolchain/toolchain.gni for details.
+ tool("stamp") {
+ command = stamp_command
+ description = stamp_description
+ }
+ tool("copy") {
+ command = copy_command
+ description = copy_description
+ }
+
+ tool("action") {
+ pool = "//build/toolchain:action_pool($default_toolchain)"
+ }
+
+ forward_variables_from(invoker, [ "deps" ])
+ }
+}
+
+# This is a shorthand for gcc_toolchain instances based on the Chromium-built
+# version of Clang. The invoker must supply toolchain_args (at minimum
+# current_cpu and current_os) and may optionally set toolprefix for the
+# binutils tools in a cross-compile case. Note that for a cross-compile case
+# this toolchain requires a config to pass the appropriate -target option, or
+# else it will actually just be doing a native compile.
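+#
+# A minimal invocation (mirroring the clang_arm toolchain in
+# //build/toolchain/linux/BUILD.gn) looks like:
+#   clang_toolchain("clang_arm") {
+#     toolprefix = "arm-linux-gnueabihf-"
+#     toolchain_args = {
+#       current_cpu = "arm"
+#       current_os = "linux"
+#     }
+#   }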
+template("clang_toolchain") {
+ if (defined(invoker.toolprefix)) {
+ toolprefix = invoker.toolprefix
+ } else {
+ toolprefix = ""
+ }
+
+ gcc_toolchain(target_name) {
+ prefix = rebase_path("$clang_base_path/bin", root_build_dir)
+ cc = "$prefix/clang"
+ cxx = "$prefix/clang++"
+ ld = cxx
+ readelf = "${toolprefix}readelf"
+ ar = "${prefix}/llvm-ar"
+ nm = "${toolprefix}nm"
+
+ forward_variables_from(invoker,
+ [
+ "strip",
+ "is_clang_analysis_supported",
+ "enable_linker_map",
+ "use_unstripped_as_runtime_outputs",
+ ])
+
+ toolchain_args = {
+ if (defined(invoker.toolchain_args)) {
+ forward_variables_from(invoker.toolchain_args, "*")
+ }
+ is_clang = true
+ }
+
+ if (defined(invoker.shlib_extension) && invoker.shlib_extension != "") {
+ shlib_extension = invoker.shlib_extension
+ }
+ }
+}
diff --git a/dsoftbus/build/toolchain/get_concurrent_links.py b/dsoftbus/build/toolchain/get_concurrent_links.py
new file mode 100755
index 0000000000000000000000000000000000000000..ab0e557a1faf41b920e192051833aa593d5b8bbd
--- /dev/null
+++ b/dsoftbus/build/toolchain/get_concurrent_links.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script computes the number of concurrent links we want to run
+# in the build as a function of machine spec. It's based
+# on GetDefaultConcurrentLinks in GYP.
+
+import multiprocessing
+import optparse
+import os
+import re
+import subprocess
+import sys
+
+
+def _get_total_memory_in_bytes():
+ if sys.platform in ('win32', 'cygwin'):
+ import ctypes
+
+ class MEMORYSTATUSEX(ctypes.Structure):
+ _fields_ = [
+ ("dwLength", ctypes.c_ulong),
+ ("dwMemoryLoad", ctypes.c_ulong),
+ ("ullTotalPhys", ctypes.c_ulonglong),
+ ("ullAvailPhys", ctypes.c_ulonglong),
+ ("ullTotalPageFile", ctypes.c_ulonglong),
+ ("ullAvailPageFile", ctypes.c_ulonglong),
+ ("ullTotalVirtual", ctypes.c_ulonglong),
+ ("ullAvailVirtual", ctypes.c_ulonglong),
+ ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
+ ]
+
+ stat = MEMORYSTATUSEX(dwLength=ctypes.sizeof(MEMORYSTATUSEX))
+ ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
+ return stat.ullTotalPhys
+ elif sys.platform.startswith('linux'):
+ if os.path.exists("/proc/meminfo"):
+ with open("/proc/meminfo") as meminfo:
+ memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+ for line in meminfo:
+ match = memtotal_re.match(line)
+ if not match:
+ continue
+ return float(match.group(1)) * 2 ** 10
+ elif sys.platform == 'darwin':
+ try:
+ return int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+ except Exception:
+ return 0
+ return 0
+
+
+def _get_default_concurrent_links(mem_per_link_gb, reserve_mem_gb):
+ mem_total_bytes = _get_total_memory_in_bytes()
+ mem_total_bytes = max(0, mem_total_bytes - reserve_mem_gb * 2 ** 30)
+ num_concurrent_links = int(max(1, mem_total_bytes / mem_per_link_gb / 2 ** 30))
+ hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2 ** 32)))
+
+    try:
+        cpu_cap = multiprocessing.cpu_count()
+    except NotImplementedError:
+        cpu_cap = 1
+
+ return min(num_concurrent_links, hard_cap, cpu_cap)
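+
+# Worked example (numbers are illustrative): on a 64 GiB machine with the
+# defaults --mem_per_link_gb=8 and --reserve_mem_gb=0, the cap from memory is
+# 64 / 8 = 8 concurrent links, further limited by GYP_LINK_CONCURRENCY_MAX
+# (when set) and by the CPU count.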
+
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option('--mem_per_link_gb', action="store", type="int", default=8)
+ parser.add_option('--reserve_mem_gb', action="store", type="int", default=0)
+ parser.disable_interspersed_args()
+ options, _ = parser.parse_args()
+
+ print(_get_default_concurrent_links(options.mem_per_link_gb,
+ options.reserve_mem_gb))
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/toolchain/get_cpu_count.py b/dsoftbus/build/toolchain/get_cpu_count.py
new file mode 100755
index 0000000000000000000000000000000000000000..28ad8ca1528d4564b7d82d980b206eeb3a56c3aa
--- /dev/null
+++ b/dsoftbus/build/toolchain/get_cpu_count.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2018 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script shows cpu count to specify capacity of action pool.
+
+import multiprocessing
+import sys
+
+
+def main():
+    try:
+        cpu_count = multiprocessing.cpu_count()
+    except NotImplementedError:
+        cpu_count = 1
+
+ print(cpu_count)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/dsoftbus/build/toolchain/linux/BUILD.gn b/dsoftbus/build/toolchain/linux/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..1a4c4e939fd0943f998e3564b1dad13d8f9f1fb5
--- /dev/null
+++ b/dsoftbus/build/toolchain/linux/BUILD.gn
@@ -0,0 +1,119 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+clang_toolchain("clang_arm") {
+ toolprefix = "arm-linux-gnueabihf-"
+ toolchain_args = {
+ current_cpu = "arm"
+ current_os = "linux"
+ }
+}
+
+clang_toolchain("clang_arm64") {
+ toolprefix = "aarch64-linux-gnu-"
+ toolchain_args = {
+ current_cpu = "arm64"
+ current_os = "linux"
+ }
+}
+
+gcc_toolchain("arm64") {
+ toolprefix = "aarch64-linux-gnu-"
+
+ cc = "${toolprefix}gcc"
+ cxx = "${toolprefix}g++"
+
+ ar = "${toolprefix}ar"
+ ld = cxx
+ readelf = "${toolprefix}readelf"
+ nm = "${toolprefix}nm"
+
+ toolchain_args = {
+ current_cpu = "arm64"
+ current_os = "linux"
+ is_clang = false
+ }
+}
+
+gcc_toolchain("arm") {
+ toolprefix = "arm-linux-gnueabihf-"
+
+ cc = "${toolprefix}gcc"
+ cxx = "${toolprefix}g++"
+
+ ar = "${toolprefix}ar"
+ ld = cxx
+ readelf = "${toolprefix}readelf"
+ nm = "${toolprefix}nm"
+
+ toolchain_args = {
+ current_cpu = "arm"
+ current_os = "linux"
+ is_clang = false
+ }
+}
+
+clang_toolchain("clang_x86") {
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
+ toolchain_args = {
+ current_cpu = "x86"
+ current_os = "linux"
+ }
+}
+
+gcc_toolchain("x86") {
+ cc = "gcc"
+ cxx = "g++"
+
+ readelf = "readelf"
+ nm = "nm"
+ ar = "ar"
+ ld = cxx
+
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
+ toolchain_args = {
+ current_cpu = "x86"
+ current_os = "linux"
+ is_clang = false
+ }
+}
+
+clang_toolchain("clang_x64") {
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
+ strip = rebase_path("${clang_base_path}/bin/llvm-strip", root_build_dir)
+ toolchain_args = {
+ current_cpu = "x64"
+ current_os = "linux"
+ }
+ shlib_extension = ".so"
+}
+
+gcc_toolchain("x64") {
+ cc = "gcc"
+ cxx = "g++"
+
+ readelf = "readelf"
+ nm = "nm"
+ ar = "ar"
+ ld = cxx
+
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
+ toolchain_args = {
+ current_cpu = "x64"
+ current_os = "linux"
+ is_clang = false
+ }
+}
diff --git a/dsoftbus/build/toolchain/mac/BUILD.gn b/dsoftbus/build/toolchain/mac/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..e720b803124bd5a7c725d2cf353e6d87af82673f
--- /dev/null
+++ b/dsoftbus/build/toolchain/mac/BUILD.gn
@@ -0,0 +1,326 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/mac/mac_sdk.gni")
+import("//build/config/mac/symbols.gni")
+
+assert(host_os == "mac")
+
+import("//build/toolchain/cc_wrapper.gni")
+import("//build/toolchain/clang_static_analyzer.gni")
+import("//build/toolchain/toolchain.gni")
+
+# When implementing tools using Python scripts, a TOOL_VERSION=N env
+# variable is placed in front of the command. The N should be incremented
+# whenever the script is changed, so that the build system rebuilds all
+# edges that utilize the script. Ideally this should be changed to use
+# proper input-dirty checking, but that could be expensive. Instead, use a
+# script to get the tool scripts' modification time to use as the version.
+# This won't cause a re-generation of GN files when the tool script changes
+# but it will cause edges to be marked as dirty if the ninja files are
+# regenerated. See https://crbug.com/619083 for details. A proper fix
+# would be to have inputs to tools (https://crbug.com/621119).
+tool_versions =
+ exec_script("get_tool_mtime.py",
+ rebase_path([
+ "//build/toolchain/mac/filter_libtool.py",
+ "//build/toolchain/mac/linker_driver.py",
+ ],
+ root_build_dir),
+ "trim scope")
+
+# Shared toolchain definition. Invocations should set current_os to set the
+# build args in this definition.
+template("mac_toolchain") {
+ toolchain(target_name) {
+ if (use_system_xcode) {
+ env_wrapper = ""
+ } else {
+ env_wrapper = "export DEVELOPER_DIR=$hermetic_xcode_path; "
+ }
+
+ # When invoking this toolchain not as the default one, these args will be
+ # passed to the build. They are ignored when this is the default toolchain.
+ assert(defined(invoker.toolchain_args),
+ "Toolchains must declare toolchain_args")
+ toolchain_args = {
+ # Populate toolchain args from the invoker.
+ forward_variables_from(invoker.toolchain_args, "*")
+
+ # The host toolchain value computed by the default toolchain's setup
+ # needs to be passed through unchanged to all secondary toolchains to
+ # ensure that it's always the same, regardless of the values that may be
+ # set on those toolchains.
+ host_toolchain = host_toolchain
+ }
+
+ # Supports building with the version of clang shipped with Xcode when
+ # targeting iOS by not respecting clang_base_path.
+ if (toolchain_args.current_os == "ios" && use_xcode_clang) {
+ prefix = ""
+ } else {
+ prefix = rebase_path("$clang_base_path/bin/", root_build_dir)
+ }
+
+ _cc = "${prefix}clang"
+ _cxx = "${prefix}clang++"
+ _ar = "${prefix}llvm-ar"
+
+ # When the invoker has explicitly overridden use_goma or cc_wrapper in the
+ # toolchain args, use those values, otherwise default to the global one.
+ # This works because the only reasonable override that toolchains might
+ # supply for these values are to force-disable them.
+ if (defined(toolchain_args.cc_wrapper)) {
+ toolchain_cc_wrapper = toolchain_args.cc_wrapper
+ } else {
+ toolchain_cc_wrapper = cc_wrapper
+ }
+
+ # Compute the compiler prefix.
+ if (toolchain_cc_wrapper != "") {
+ compiler_prefix = toolchain_cc_wrapper + " "
+ } else {
+ compiler_prefix = ""
+ }
+
+ cc = compiler_prefix + _cc
+ cxx = compiler_prefix + _cxx
+ ld = _cxx
+
+ if (use_clang_static_analyzer) {
+ analyzer_wrapper =
+ rebase_path("//build/toolchain/clang_static_analyzer_wrapper.py",
+ root_build_dir) + " --mode=clang"
+ cc = analyzer_wrapper + " ${cc}"
+ cxx = analyzer_wrapper + " ${cxx}"
+ ld = cxx
+ }
+
+ linker_driver =
+ "TOOL_VERSION=${tool_versions.linker_driver} " +
+ rebase_path("//build/toolchain/mac/linker_driver.py", root_build_dir)
+
+ # On iOS, the final applications are assembled using lipo (to support fat
+ # builds). The correct flags are passed to the linker_driver.py script
+ # directly during the lipo call.
+ _save_unstripped_output = false
+
+ # Make these apply to all tools below.
+ lib_switch = "-l"
+ lib_dir_switch = "-L"
+
+ # Object files go in this directory. Use label_name instead of
+ # target_output_name since labels will generally have no spaces and will be
+ # unique in the directory.
+ object_subdir = "{{source_out_dir}}/{{label_name}}"
+
+ if (_save_unstripped_output) {
+ _unstripped_output = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.unstripped"
+ }
+
+ tool("cc") {
+ depfile = "{{output}}.d"
+ precompiled_header_type = "gcc"
+ command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "CC {{output}}"
+ outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+ }
+
+ tool("cxx") {
+ depfile = "{{output}}.d"
+ precompiled_header_type = "gcc"
+ command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "CXX {{output}}"
+ outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+ }
+
+ tool("asm") {
+ # For GCC we can just use the C compiler to compile assembly.
+ depfile = "{{output}}.d"
+ command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{asmflags}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "ASM {{output}}"
+ outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+ }
+
+ tool("objc") {
+ depfile = "{{output}}.d"
+ precompiled_header_type = "gcc"
+ command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_objc}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "OBJC {{output}}"
+ outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+ }
+
+ tool("objcxx") {
+ depfile = "{{output}}.d"
+ precompiled_header_type = "gcc"
+ command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_objcc}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "OBJCXX {{output}}"
+ outputs = [ "$object_subdir/{{source_name_part}}.o" ]
+ }
+
+ tool("alink") {
+ rspfile = "{{output}}.rsp"
+ rspfile_content = "{{inputs}}"
+ command = "$_ar {{arflags}} -r -c -s -D {{output}} \"@$rspfile\""
+ command = "rm -f {{output}} && $command"
+ description = "AR {{output}}"
+ outputs = [ "{{output_dir}}/{{target_output_name}}{{output_extension}}" ]
+ default_output_dir = "{{target_out_dir}}"
+ default_output_extension = ".a"
+ output_prefix = "lib"
+ }
+
+ tool("solink") {
+ dylib = "{{output_dir}}/{{target_output_name}}{{output_extension}}" # eg
+ # "./libfoo.dylib"
+ rspfile = dylib + ".rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ # These variables are not built into GN but are helpers that implement
+ # (1) linking to produce a .dylib, (2) extracting the symbols from that
+ # file to a temporary file, (3) if the temporary file has differences from
+ # the existing .TOC file, overwrite it, otherwise, don't change it.
+ #
+ # As a special case, if the library reexports symbols from other dynamic
+ # libraries, we always update the .TOC and skip the temporary file and
+ # diffing steps, since that library always needs to be re-linked.
+ tocname = dylib + ".TOC"
+ temporary_tocname = dylib + ".tmp"
+
+ does_reexport_command = "[ ! -e \"$dylib\" -o ! -e \"$tocname\" ] || otool -l \"$dylib\" | grep -q LC_REEXPORT_DYLIB"
+
+ link_command = "$linker_driver $ld -shared "
+ if (is_component_build) {
+ link_command += " -Wl,-install_name,@rpath/\"{{target_output_name}}{{output_extension}}\" "
+ }
+ link_command += "{{ldflags}} -o \"$dylib\" -Wl,-filelist,\"$rspfile\" {{libs}} {{solibs}}"
+
+ replace_command = "if ! cmp -s \"$temporary_tocname\" \"$tocname\"; then mv \"$temporary_tocname\" \"$tocname\""
+ extract_toc_command = "{ otool -l \"$dylib\" | grep LC_ID_DYLIB -A 5; nm -gP \"$dylib\" | cut -f1-2 -d' ' | grep -v U\$\$; true; }"
+
+ command = "$env_wrapper if $does_reexport_command ; then $link_command && $extract_toc_command > \"$tocname\"; else $link_command && $extract_toc_command > \"$temporary_tocname\" && $replace_command ; fi; fi"
+
+ rspfile_content = "{{inputs_newline}}"
+
+ description = "SOLINK {{output}}"
+
+ # Use this for {{output_extension}} expansions unless a target manually
+ # overrides it (in which case {{output_extension}} will be what the target
+ # specifies).
+ default_output_dir = "{{root_out_dir}}"
+ default_output_extension = ".dylib"
+
+ output_prefix = "lib"
+
+      # Since the above commands only update the .TOC file when it changes,
+      # ask Ninja to check if the timestamp actually changed to know if
+      # downstream dependencies should be recompiled.
+ restat = true
+
+ # Tell GN about the output files. It will link to the dylib but use the
+ # tocname for dependency management.
+ outputs = [
+ dylib,
+ tocname,
+ ]
+ link_output = dylib
+ depend_output = tocname
+
+ if (_save_unstripped_output) {
+ outputs += [ _unstripped_output ]
+ }
+ }
+
+ tool("solink_module") {
+ sofile = "{{output_dir}}/{{target_output_name}}{{output_extension}}" # eg
+ # "./libfoo.so"
+ rspfile = sofile + ".rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ link_command = "$env_wrapper $linker_driver $ld -bundle {{ldflags}} -o \"$sofile\" -Wl,-filelist,\"$rspfile\""
+ if (is_component_build) {
+ link_command += " -Wl,-install_name,@rpath/{{target_output_name}}{{output_extension}}"
+ }
+ link_command += " {{solibs}} {{libs}}"
+ command = link_command
+
+ rspfile_content = "{{inputs_newline}}"
+
+ description = "SOLINK_MODULE {{output}}"
+
+ # Use this for {{output_extension}} expansions unless a target manually
+ # overrides it (in which case {{output_extension}} will be what the target
+ # specifies).
+ default_output_dir = "{{root_out_dir}}"
+ default_output_extension = ".so"
+
+ outputs = [ sofile ]
+
+ if (_save_unstripped_output) {
+ outputs += [ _unstripped_output ]
+ }
+ }
+
+ tool("link") {
+ outfile = "{{output_dir}}/{{target_output_name}}{{output_extension}}"
+ rspfile = "$outfile.rsp"
+ pool = "//build/toolchain:link_pool($default_toolchain)"
+
+ # Note about -filelist: Apple's linker reads the file list file and
+ # interprets each newline-separated chunk of text as a file name. It
+ # doesn't do the things one would expect from the shell like unescaping
+ # or handling quotes. In contrast, when Ninja finds a file name with
+ # spaces, it single-quotes them in $inputs_newline as it would normally
+ # do for command-line arguments. Thus any source names with spaces, or
+ # label names with spaces (which GN bases the output paths on) will be
+ # corrupted by this process. Don't use spaces for source files or labels.
+ command = "$env_wrapper $linker_driver $ld {{ldflags}} -o \"$outfile\" -Wl,-filelist,\"$rspfile\" {{solibs}} {{libs}}"
+ description = "LINK $outfile"
+ rspfile_content = "{{inputs_newline}}"
+ outputs = [ outfile ]
+
+ if (_save_unstripped_output) {
+ outputs += [ _unstripped_output ]
+ }
+
+ default_output_dir = "{{root_out_dir}}"
+ }
+
+ # These two are really entirely generic, but have to be repeated in
+ # each toolchain because GN doesn't allow a template to be used here.
+ # See //build/toolchain/toolchain.gni for details.
+ tool("stamp") {
+ command = stamp_command
+ description = stamp_description
+ }
+ tool("copy") {
+ command = copy_command
+ description = copy_description
+ }
+
+ tool("action") {
+ pool = "//build/toolchain:action_pool($default_toolchain)"
+ }
+ }
+}
+
+mac_toolchain("clang_x64") {
+ toolchain_args = {
+ current_cpu = "x64"
+ current_os = "mac"
+ }
+}
+
+mac_toolchain("clang_x86") {
+ toolchain_args = {
+ current_cpu = "x86"
+ current_os = "mac"
+ }
+}
diff --git a/dsoftbus/build/toolchain/mac/filter_libtool.py b/dsoftbus/build/toolchain/mac/filter_libtool.py
new file mode 100755
index 0000000000000000000000000000000000000000..bf7d611ebeebe19876c39949fb1370a101ff8186
--- /dev/null
+++ b/dsoftbus/build/toolchain/mac/filter_libtool.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import subprocess
+import sys
+
+# This script executes libtool and filters out logspam lines like:
+# '/path/to/libtool: file: foo.o has no symbols'
+# Materialize the patterns with list() so they can be matched repeatedly; a
+# bare map() in Python 3 is an iterator and would be exhausted after the first
+# line checked.
+BLOCKLIST_PATTERNS = list(map(re.compile, [
+    r'^.*libtool: (?:for architecture: \S* )?file: .* has no symbols$',
+    r'^.*libtool: warning for library: .* the table of contents is empty '
+    r'\(no object file members in the library define global symbols\)$',
+    r'^.*libtool: warning same member name \(\S*\) in output file used for '
+    r'input files: \S* and: \S* \(due to use of basename, truncation, '
+    r'blank padding or duplicate input files\)$',
+]))
+
+
+def is_blocklisted_line(line):
+ """Returns whether the line should be filtered out."""
+ for pattern in BLOCKLIST_PATTERNS:
+ if isinstance(line, bytes):
+ line = line.decode()
+ if pattern.match(line):
+ return True
+ return False
+
+
+def main(cmd_list):
+ env = os.environ.copy()
+ # The problem with this flag is that it resets the file mtime on the file to
+ # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
+ env['ZERO_AR_DATE'] = '1'
+ libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
+ _, err = libtoolout.communicate()
+ for line in err.splitlines():
+ if not is_blocklisted_line(line):
+ print(line, file=sys.stderr)
+ # Unconditionally touch the output .a file on the command line if present
+ # and the command succeeded. A bit hacky.
+ if not libtoolout.returncode:
+ for i in range(len(cmd_list) - 1):
+ if cmd_list[i] == '-o' and cmd_list[i + 1].endswith('.a'):
+ os.utime(cmd_list[i + 1], None)
+ break
+ return libtoolout.returncode
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/toolchain/mac/get_tool_mtime.py b/dsoftbus/build/toolchain/mac/get_tool_mtime.py
new file mode 100755
index 0000000000000000000000000000000000000000..4d81983929cdf03c01d9070291b9cfed17cb660b
--- /dev/null
+++ b/dsoftbus/build/toolchain/mac/get_tool_mtime.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+# Usage: python get_tool_mtime.py path/to/file1.py path/to/file2.py
+#
+# Prints a GN scope with the variable name being the basename sans-extension
+# and the value being the file modification time. A variable is emitted for
+# each file argument on the command line.
+
+if __name__ == '__main__':
+ for f in sys.argv[1:]:
+ variable = os.path.splitext(os.path.basename(f))[0]
+ print('%s = %d' % (variable, os.path.getmtime(f)))
diff --git a/dsoftbus/build/toolchain/mac/linker_driver.py b/dsoftbus/build/toolchain/mac/linker_driver.py
new file mode 100755
index 0000000000000000000000000000000000000000..c0ca303b3b13c9b72343c237166d7c7617faad02
--- /dev/null
+++ b/dsoftbus/build/toolchain/mac/linker_driver.py
@@ -0,0 +1,230 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import os.path
+import shutil
+import subprocess
+import sys
+
+"""
+The linker_driver.py is responsible for forwarding a linker invocation to
+the compiler driver, while processing special arguments itself.
+
+Usage: linker_driver.py clang++ main.o -L. -llib -o prog -Wcrl,dsym,out
+
+On Mac, the logical step of linking is handled by three discrete tools to
+perform the image link, debug info link, and strip. The linker_driver.py
+combines these three steps into a single tool.
+
+The command passed to the linker_driver.py should be the compiler driver
+invocation for the linker. It is first invoked unaltered (except for the
+removal of the special driver arguments, described below). Then the driver
+performs additional actions, based on these arguments:
+
+  -Wcrl,dsym,<dsym_path_prefix>
+ After invoking the linker, this will run `dsymutil` on the linker's
+ output, producing a dSYM bundle, stored at dsym_path_prefix. As an
+ example, if the linker driver were invoked with:
+ "... -o out/gn/obj/foo/libbar.dylib ... -Wcrl,dsym,out/gn ..."
+ The resulting dSYM would be out/gn/libbar.dylib.dSYM/.
+
+  -Wcrl,unstripped,<unstripped_path_prefix>
+ After invoking the linker, and before strip, this will save a copy of
+ the unstripped linker output in the directory unstripped_path_prefix.
+
+  -Wcrl,strip,<strip_arguments>
+ After invoking the linker, and optionally dsymutil, this will run
+ the strip command on the linker's output. strip_arguments are
+ comma-separated arguments to be passed to the strip command.
+"""
+def main(args):
+ """main function for the linker driver. Separates out the arguments for
+ the main compiler driver and the linker driver, then invokes all the
+ required tools.
+
+ Args:
+ args: list of string, Arguments to the script.
+ """
+
+ if len(args) < 2:
+ raise RuntimeError("Usage: linker_driver.py [linker-invocation]")
+
+ for i in range(len(args)):
+ if args[i] != '--developer_dir':
+ continue
+ os.environ['DEVELOPER_DIR'] = args[i + 1]
+ del args[i:i + 2]
+ break
+
+ # Collect arguments to the linker driver (this script) and remove them from
+ # the arguments being passed to the compiler driver.
+ linker_driver_actions = {}
+ compiler_driver_args = []
+ for arg in args[1:]:
+ if arg.startswith(_LINKER_DRIVER_ARG_PREFIX):
+ # Convert driver actions into a map of name => lambda to invoke.
+ driver_action = process_linker_driver_arg(arg)
+ assert driver_action[0] not in linker_driver_actions
+ linker_driver_actions[driver_action[0]] = driver_action[1]
+ else:
+ compiler_driver_args.append(arg)
+
+ linker_driver_outputs = [_find_linker_output(compiler_driver_args)]
+
+ try:
+ # Run the linker by invoking the compiler driver.
+ subprocess.check_call(compiler_driver_args)
+
+ # Run the linker driver actions, in the order specified by the actions list.
+ for action in _LINKER_DRIVER_ACTIONS:
+ name = action[0]
+ if name in linker_driver_actions:
+ linker_driver_outputs += linker_driver_actions[name](args)
+    except:
+        # If a linker driver action failed, remove all the outputs to make the
+        # build step atomic. Use an explicit loop rather than map(), which is
+        # lazy in Python 3 and would never actually run the removals.
+        for output in linker_driver_outputs:
+            _remove_path(output)
+
+ # Re-report the original failure.
+ raise
+
+
+def process_linker_driver_arg(arg):
+ """Processes a linker driver argument and returns a tuple containing the
+ name and unary lambda to invoke for that linker driver action.
+
+ Args:
+ arg: string, The linker driver argument.
+
+ Returns:
+ A 2-tuple:
+ 0: The driver action name, as in _LINKER_DRIVER_ACTIONS.
+ 1: An 1-ary lambda that takes the full list of arguments passed to
+ main(). The lambda should call the linker driver action that
+ corresponds to the argument and return a list of outputs from the
+ action.
+ """
+ if not arg.startswith(_LINKER_DRIVER_ARG_PREFIX):
+ raise ValueError('%s is not a linker driver argument' % (arg,))
+
+ sub_arg = arg[len(_LINKER_DRIVER_ARG_PREFIX):]
+
+ for driver_action in _LINKER_DRIVER_ACTIONS:
+ (name, action) = driver_action
+ if sub_arg.startswith(name):
+ return (name,
+ lambda full_args: action(sub_arg[len(name):], full_args))
+
+ raise ValueError('Unknown linker driver argument: %s' % (arg,))
+
+
+def run_dsym_util(dsym_path_prefix, full_args):
+ """Linker driver action for -Wcrl,dsym,. Invokes dsymutil
+ on the linker's output and produces a dsym file at |dsym_file| path.
+
+ Args:
+ dsym_path_prefix: string, The path at which the dsymutil output should be
+ located.
+ full_args: list of string, Full argument list for the linker driver.
+
+ Returns:
+ list of string, Build step outputs.
+ """
+ if not len(dsym_path_prefix):
+ raise ValueError('Unspecified dSYM output file')
+
+ linker_out = _find_linker_output(full_args)
+ base = os.path.basename(linker_out)
+ dsym_out = os.path.join(dsym_path_prefix, base + '.dSYM')
+
+ # Remove old dSYMs before invoking dsymutil.
+ _remove_path(dsym_out)
+ subprocess.check_call(['xcrun', 'dsymutil', '-o', dsym_out, linker_out])
+ return [dsym_out]
+
+
+def run_save_unstripped(unstripped_path_prefix, full_args):
+ """Linker driver action for -Wcrl,unstripped,. Copies
+ the linker output to |unstripped_path_prefix| before stripping.
+
+ Args:
+ unstripped_path_prefix: string, The path at which the unstripped output
+ should be located.
+ full_args: list of string, Full argument list for the linker driver.
+
+ Returns:
+ list of string, Build step outputs.
+ """
+ if not len(unstripped_path_prefix):
+ raise ValueError('Unspecified unstripped output file')
+
+ linker_out = _find_linker_output(full_args)
+ base = os.path.basename(linker_out)
+ unstripped_out = os.path.join(unstripped_path_prefix, base + '.unstripped')
+
+ shutil.copyfile(linker_out, unstripped_out)
+ return [unstripped_out]
+
+
+def run_strip(strip_args_string, full_args):
+ """Linker driver action for -Wcrl,strip,.
+
+ Args:
+ strip_args_string: string, Comma-separated arguments for `strip`.
+ full_args: list of string, Full arguments for the linker driver.
+
+ Returns:
+ list of string, Build step outputs.
+ """
+ strip_command = ['xcrun', 'strip']
+ if len(strip_args_string) > 0:
+ strip_command += strip_args_string.split(',')
+ strip_command.append(_find_linker_output(full_args))
+ subprocess.check_call(strip_command)
+ return []
+
+
+def _find_linker_output(full_args):
+ """Finds the output of the linker by looking for the output flag in its
+ argument list. As this is a required linker argument, raises an error if it
+ cannot be found.
+ """
+ # The linker_driver.py script may be used to wrap either the compiler linker
+ # (uses -o to configure the output) or lipo (uses -output to configure the
+ # output). Since wrapping the compiler linker is the most likely possibility
+ # use try/except and fallback to checking for -output if -o is not found.
+ try:
+ output_flag_index = full_args.index('-o')
+ except ValueError:
+ output_flag_index = full_args.index('-output')
+ return full_args[output_flag_index + 1]
+
+
+def _remove_path(path):
+ """Removes the file or directory at |path| if it exists."""
+ if os.path.exists(path):
+ if os.path.isdir(path):
+ shutil.rmtree(path)
+ else:
+ os.unlink(path)
+
+
+_LINKER_DRIVER_ARG_PREFIX = '-Wcrl,'
+
+"""List of linker driver actions. The sort order of this list affects the
+order in which the actions are invoked. The first item in the tuple is the
+argument's -Wcrl, and the second is the function to invoke.
+"""
+_LINKER_DRIVER_ACTIONS = [
+ ('dsym,', run_dsym_util),
+ ('unstripped,', run_save_unstripped),
+ ('strip,', run_strip),
+]
+
+if __name__ == '__main__':
+ main(sys.argv)
+ sys.exit(0)
diff --git a/dsoftbus/build/toolchain/mingw/BUILD.gn b/dsoftbus/build/toolchain/mingw/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..098074ff72a07dd92bcdfd294f620daf51ba508e
--- /dev/null
+++ b/dsoftbus/build/toolchain/mingw/BUILD.gn
@@ -0,0 +1,57 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("//build/toolchain/gcc_toolchain.gni")
+
+declare_args() {
+ # Whether unstripped binaries, i.e. compiled with debug symbols, should be
+ # considered runtime_deps rather than stripped ones.
+ mingw_unstripped_runtime_outputs = true
+}
+
+template("mingw_toolchain") {
+ gcc_toolchain(target_name) {
+ assert(defined(invoker.toolchain_root),
+ "toolchain_root must be defined for mingw_toolchain.")
+ assert(defined(invoker.toolchain_args),
+ "toolchain_args must be defined for mingw_toolchain.")
+ toolchain_args = invoker.toolchain_args
+
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
+ _mingw_tool_prefix =
+ rebase_path("${invoker.toolchain_root}/bin", root_build_dir)
+
+ cc = "${_mingw_tool_prefix}/clang"
+ cxx = "${_mingw_tool_prefix}/clang++"
+ ar = "${_mingw_tool_prefix}/llvm-ar"
+ ld = cxx
+ readelf = "${_mingw_tool_prefix}/llvm-readelf"
+ nm = "${_mingw_tool_prefix}/llvm-nm"
+ strip = "${_mingw_tool_prefix}/llvm-strip"
+ use_unstripped_as_runtime_outputs = mingw_unstripped_runtime_outputs
+
+ executable_extension = ".exe"
+ shlib_extension = ".dll"
+ }
+}
+
+mingw_toolchain("mingw_x86_64") {
+ toolchain_root = "//prebuilts/mingw-w64/ohos/linux-x86_64/clang-mingw"
+ toolchain_args = {
+ current_cpu = "x86_64"
+ current_os = "mingw"
+ use_custom_libcxx = false
+ is_clang = true
+ }
+}
diff --git a/dsoftbus/build/toolchain/ohos/BUILD.gn b/dsoftbus/build/toolchain/ohos/BUILD.gn
new file mode 100755
index 0000000000000000000000000000000000000000..b7b6dde6f33d74a8f6ce7ef3e1f86f400cd6c75f
--- /dev/null
+++ b/dsoftbus/build/toolchain/ohos/BUILD.gn
@@ -0,0 +1,73 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/clang/clang.gni")
+import("//build/config/ohos/config.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+declare_args() {
+ # Whether unstripped binaries, i.e. compiled with debug symbols, should be
+ # considered runtime_deps rather than stripped ones.
+ ohos_unstripped_runtime_outputs = true
+}
+
+# The ohos clang toolchains share most of the same parameters, so we have this
+# wrapper around gcc_toolchain to avoid duplication of logic.
+#
+# Parameters:
+#  - sysroot
+#      Sysroot for this architecture.
+#  - lib_dir
+#      Subdirectory inside of sysroot where libs go.
+template("ohos_clang_toolchain") {
+ gcc_toolchain(target_name) {
+ assert(defined(invoker.toolchain_args),
+ "toolchain_args must be defined for ohos_clang_toolchain()")
+ toolchain_args = invoker.toolchain_args
+ toolchain_args.current_os = "ohos"
+
+ # Output linker map files for binary size analysis.
+ enable_linker_map = true
+
+ ohos_libc_dir =
+ rebase_path(invoker.sysroot + "/" + invoker.lib_dir, root_build_dir)
+ libs_section_prefix = "${ohos_libc_dir}/Scrt1.o"
+ libs_section_prefix += " ${ohos_libc_dir}/crti.o"
+ libs_section_postfix = "${ohos_libc_dir}/crtn.o"
+
+ _prefix = rebase_path("${clang_base_path}/bin", root_build_dir)
+ cc = "${_prefix}/clang"
+ cxx = "${_prefix}/clang++"
+ ar = "${_prefix}/llvm-ar"
+ ld = cxx
+ readelf = "${_prefix}/llvm-readobj"
+ nm = "${_prefix}/llvm-nm"
+ strip = rebase_path("${clang_base_path}/bin/llvm-strip", root_build_dir)
+ use_unstripped_as_runtime_outputs = ohos_unstripped_runtime_outputs
+
+ # Don't use .cr.so for loadable_modules since they are always loaded via
+ # absolute path.
+ loadable_module_extension = ".so"
+ }
+}
+
+ohos_clang_toolchain("ohos_clang_arm") {
+ sysroot = "${musl_sysroot}"
+ lib_dir = "usr/lib/arm-linux-ohosmusl"
+ toolchain_args = {
+ current_cpu = "arm"
+ }
+}
+
+ohos_clang_toolchain("ohos_clang_arm64") {
+ sysroot = "${musl_sysroot}"
+ lib_dir = "usr/lib/aarch64-linux-ohosmusl"
+ toolchain_args = {
+ current_cpu = "arm64"
+ }
+}
diff --git a/dsoftbus/build/toolchain/toolchain.gni b/dsoftbus/build/toolchain/toolchain.gni
new file mode 100755
index 0000000000000000000000000000000000000000..3f9b4d6c05dc0bbd418bcbb7a6aab606494d04e0
--- /dev/null
+++ b/dsoftbus/build/toolchain/toolchain.gni
@@ -0,0 +1,102 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Toolchain-related configuration that may be needed outside the context of the
+# toolchain() rules themselves.
+
+import("//build/misc/overrides/build.gni")
+
+declare_args() {
+ # If this is set to true, or if LLVM_FORCE_HEAD_REVISION is set to 1
+ # in the environment, we use the revision in the llvm repo to determine
+ # the CLANG_REVISION to use, instead of the version hard-coded into
+ # //tools/clang/scripts/update.py. This should only be used in
+ # conjunction with setting LLVM_FORCE_HEAD_REVISION in the
+ # environment when `gclient runhooks` is run as well.
+ llvm_force_head_revision = false
+
+ # Compile with Xcode version of clang instead of hermetic version shipped
+ # with the build. Used on iOS to ship official builds (as they are built
+ # with the version of clang shipped with Xcode).
+ use_xcode_clang = is_ios && is_official_build
+
+ # Used for binary size analysis.
+ # Currently disabled on LLD because of a bug (fixed upstream).
+ # See https://crbug.com/716209.
+ generate_linker_map = is_ohos && is_official_build
+
+ # Use absolute file paths in the compiler diagnostics and __FILE__ macro
+ # if needed.
+ msvc_use_absolute_paths = false
+}
+
+if (generate_linker_map) {
+ assert(
+ is_official_build,
+ "Linker map files should only be generated when is_official_build = true")
+ assert(current_os == "ohos" || target_os == "linux",
+ "Linker map files should only be generated for ohos and Linux")
+}
+
+# The path to the hermetic install of Xcode. Only relevant when
+# use_system_xcode = false.
+hermetic_xcode_path =
+ rebase_path("//build/${target_os}_files/Xcode.app", "", root_build_dir)
+
+declare_args() {
+ if (is_clang) {
+ # Clang compiler version. Clang files are placed at version-dependent paths.
+ clang_version = "10.0.1"
+ }
+ use_custom_clang = false
+}
+
+# Check target_os here instead of is_ios as this file is loaded for secondary
+# toolchain (host toolchain in particular) but the argument is the same for
+# all toolchains.
+assert(!use_xcode_clang || target_os == "ios",
+ "Using Xcode's clang is only supported in iOS builds")
+
+# Extensions for executable and shared library files (including leading dot).
+executable_extension = ""
+if (is_mac || is_ios) {
+ shlib_extension = ".dylib"
+} else if (is_ohos && is_component_build) {
+  # By appending .z, we prevent name collisions with libraries already loaded
+  # by the ohos platform.
+ shlib_extension = ".z.so"
+} else if (is_mingw) {
+ shlib_extension = ".dll"
+ executable_extension = ".exe"
+} else if (is_posix) {
+ shlib_extension = ".so"
+} else if (is_win) {
+ shlib_extension = ".dll"
+} else {
+ assert(false, "Platform not supported")
+}
+
+# Prefix for shared library files.
+if (is_posix) {
+ shlib_prefix = "lib"
+} else {
+ shlib_prefix = ""
+}
+
+# While other "tool"s in a toolchain are specific to the target of that
+# toolchain, the "stamp" and "copy" tools are really generic to the host;
+# but each toolchain must define them separately. GN doesn't allow a
+# template instantiation inside a toolchain definition, so some boilerplate
+# has to be repeated in each toolchain to define these two tools. These
+# four variables reduce the duplication in that boilerplate.
+stamp_description = "STAMP {{output}}"
+copy_description = "COPY {{source}} {{output}}"
+if (host_os == "win") {
+ _tool_wrapper_path =
+ rebase_path("//build/toolchain/win/tool_wrapper.py", root_build_dir)
+ stamp_command = "cmd /c type nul > \"{{output}}\""
+ copy_command =
+ "$python_path $_tool_wrapper_path recursive-mirror {{source}} {{output}}"
+} else {
+ stamp_command = "touch {{output}}"
+ copy_command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
+}
diff --git a/dsoftbus/build/toolchain/wrapper_utils.py b/dsoftbus/build/toolchain/wrapper_utils.py
new file mode 100755
index 0000000000000000000000000000000000000000..803f99aa75c28b4b6b135e10e7719fc9f179018a
--- /dev/null
+++ b/dsoftbus/build/toolchain/wrapper_utils.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# found in the LICENSE file.
+
+"""Helper functions for gcc_toolchain.gni wrappers."""
+
+import gzip
+import os
+import subprocess
+import shutil
+import threading
+
+_BAT_PREFIX = 'cmd /c call '
+
+
+def _gzip_then_delete(src_path, dest_path):
+ """ Results for ohos map file with GCC on a z620:
+ Uncompressed: 207MB
+ gzip -9: 16.4MB, takes 8.7 seconds.
+ gzip -1: 21.8MB, takes 2.0 seconds.
+ Piping directly from the linker via -print-map (or via -Map with a fifo)
+ adds a whopping 30-45 seconds!
+ """
+ with open(src_path, 'rb') as f_in, gzip.GzipFile(dest_path,
+ 'wb',
+ 1) as f_out:
+ shutil.copyfileobj(f_in, f_out)
+ os.unlink(src_path)
+
+
+def command_to_run(command):
+ """Generates commands compatible with Windows.
+
+ When running on a Windows host and using a toolchain whose tools are
+ actually wrapper scripts (i.e. .bat files on Windows) rather than binary
+ executables, the |command| to run has to be prefixed with this magic.
+ The GN toolchain definitions take care of that for when GN/Ninja is
+ running the tool directly. When that command is passed in to this
+ script, it appears as a unitary string but needs to be split up so that
+ just 'cmd' is the actual command given to Python's subprocess module.
+
+ Args:
+ command: List containing the UNIX style |command|.
+
+ Returns:
+ A list containing the Windows version of the |command|.
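+
+    Example (hypothetical input): ['cmd /c call tool.bat', 'in.o'] becomes
+    ['cmd', '/c', 'call', 'tool.bat', 'in.o'].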
+ """
+ if command[0].startswith(_BAT_PREFIX):
+ command = command[0].split(None, 3) + command[1:]
+ return command
+
+
+def run_link_with_optional_map_file(command, env=None, map_file=None):
+ """Runs the given command, adding in -Wl,-Map when |map_file| is given.
+
+ Also takes care of gzipping when |map_file| ends with .gz.
+
+ Args:
+ command: List of arguments comprising the command.
+ env: Environment variables.
+ map_file: Path to output map_file.
+
+ Returns:
+ The exit code of running |command|.
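+
+    Example (hypothetical): with map_file='libfoo.so.map.gz' the function
+    appends '-Wl,-Map,libfoo.so.map.gz.tmp' to |command| and gzips the map in
+    a background thread once the link succeeds.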
+ """
+ tmp_map_path = None
+ if map_file and map_file.endswith('.gz'):
+ tmp_map_path = map_file + '.tmp'
+ command.append('-Wl,-Map,' + tmp_map_path)
+ elif map_file:
+ command.append('-Wl,-Map,' + map_file)
+
+ result = subprocess.call(command, env=env)
+
+ if tmp_map_path and result == 0:
+ threading.Thread(
+ target=lambda: _gzip_then_delete(tmp_map_path, map_file)).start()
+ elif tmp_map_path and os.path.exists(tmp_map_path):
+ os.unlink(tmp_map_path)
+
+ return result
+
+
+def capture_command_stderr(command, env=None):
+ """Returns the stderr of a command.
+
+ Args:
+ command: A list containing the command and arguments.
+ env: Environment variables for the new process.
+ """
+ child = subprocess.Popen(command, stderr=subprocess.PIPE, env=env)
+ _, stderr = child.communicate()
+ return child.returncode, stderr
diff --git a/dsoftbus/build/tools/check_deps/README.md b/dsoftbus/build/tools/check_deps/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..206b65078ac821f798e94961cf1488a3ddcf8a1b
--- /dev/null
+++ b/dsoftbus/build/tools/check_deps/README.md
@@ -0,0 +1,42 @@
+## Checking for misused deps
+
+**Why this check exists:**
+
+When declaring a module's dependencies in BUILD.gn, intra-part and inter-part dependencies must be kept apart: dependencies within the same part go in deps, while dependencies on other parts go in external_deps. Some modules misuse deps and list inter-part dependencies there. This tool reports which deps entries of existing modules are misused.
+
+**How to run the check:**
+
+1. Build the code to produce the intermediate files: run only the gn phase, with the check_deps flag enabled.
+
+   ```shell
+   ./build.sh --product-name Hi3516DV300 --ccache --gn-args pycache_enable=true --gn-args check_deps=true --build-only-gn
+   ```
+
+2. Run the check script.
+
+   ```shell
+   # For example, run from the source root:
+   python build/tools/check_deps/check_deps.py --parts-path-file out/ohos-arm-release/build_configs/parts_info/parts_path_info.json --deps-path out/ohos-arm-release/deps_files
+   ```
+
+3. Script options
+
+   ```
+   --deps-path        # Required. Directory of module dependency data files; gn writes it to out/ohos-arm-release/deps_files. If the script is not run from the source root, point this at the correct path.
+   --parts-path-file  # Required. Parts path file, containing the path of each part.
+   ```
+
+4. Output
+
+   The script creates a module_deps_info folder next to the deps_files directory given by `--deps-path` and writes the results there.
+
+   ```
+   module_deps_info/wrong_used_deps.json # misused deps entries per module
+   ```
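+
+   For orientation, the result maps each part to its offending modules and
+   their cross-part deps entries; a hypothetical entry (all names here are
+   illustrative) looks like:
+
+   ```
+   {
+     "part_a": {
+       "//foundation/part_a/module:mod": [
+         "//base/part_b/interfaces:lib"
+       ]
+     }
+   }
+   ```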
+
+**Note:**
+*This tool is still being improved; the scripts and usage steps may change at any time.*
diff --git a/dsoftbus/build/tools/check_deps/check_deps.py b/dsoftbus/build/tools/check_deps/check_deps.py
new file mode 100755
index 0000000000000000000000000000000000000000..285ea4596f3dc6b8a89cad6d25ee094cb3bb23c9
--- /dev/null
+++ b/dsoftbus/build/tools/check_deps/check_deps.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import json
+import sys
+import argparse
+
+
+def read_json_file(input_file):
+ if not os.path.exists(input_file):
+ print("file '{}' doesn't exist.".format(input_file))
+ return None
+
+ data = None
+ try:
+ with open(input_file, 'r') as input_f:
+ data = json.load(input_f)
+ except json.decoder.JSONDecodeError:
+ print("The file '{}' format is incorrect.".format(input_file))
+ raise
+ return data
+
+
+def write_json_file(output_file, content):
+ file_dir = os.path.dirname(os.path.abspath(output_file))
+ if not os.path.exists(file_dir):
+ os.makedirs(file_dir, exist_ok=True)
+ with open(output_file, 'w') as output_f:
+ json.dump(content, output_f, indent=2)
+
+
+def get_wrong_used_deps(parts_path_file, deps_files):
+    print("Start checking deps!")
+ part_path_data = read_json_file(parts_path_file)
+ wrong_used_deps = {}
+ not_exist_part = []
+ ignore_parts = ['unittest', 'moduletest', 'systemtest']
+
+ for filename in os.listdir(deps_files):
+ file_path = os.path.join(deps_files, filename)
+ module_deps_data = read_json_file(file_path)
+
+ part_name = module_deps_data.get("part_name")
+ deps = module_deps_data.get("deps")
+ module_label = module_deps_data.get("module_label").split("(")[0]
+ part_path = part_path_data.get(part_name, "no_part_path")
+
+ if part_name in ignore_parts:
+ continue
+ if part_path == "no_part_path":
+ if part_name not in not_exist_part:
+ print("Warning! Can not find part '{}' path".format(part_name))
+ not_exist_part.append(part_name)
+ continue
+
+ _wrong_used_deps = []
+ for dep in deps:
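+            # A GN label like '//foundation/part_a/module:mod' lives under the
+            # part's source path; strip the leading '//' and flag any dep that
+            # falls outside that path as a cross-part dependency, which should
+            # have been declared in external_deps instead.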
+ dep_path = dep[2:]
+ if not dep_path.startswith(part_path):
+ _wrong_used_deps.append(dep)
+ if len(_wrong_used_deps) > 0:
+ if part_name not in wrong_used_deps:
+ wrong_used_deps[part_name] = {module_label: _wrong_used_deps}
+ else:
+ wrong_used_deps[part_name][module_label] = _wrong_used_deps
+
+ output_file = os.path.join(os.path.dirname(
+ deps_files), "module_deps_info", "wrong_used_deps.json")
+ write_json_file(output_file, wrong_used_deps)
+ print("Check deps result output to '{}'.".format(output_file))
+
+
+def main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--parts-path-file', required=True)
+ parser.add_argument('--deps-path', required=True)
+ args = parser.parse_args(argv)
+ get_wrong_used_deps(args.parts_path_file, args.deps_path)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/tools/module_dependence/README.md b/dsoftbus/build/tools/module_dependence/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..01b6c3c8c40c3e39bd5a74b08e82e8033575898a
--- /dev/null
+++ b/dsoftbus/build/tools/module_dependence/README.md
@@ -0,0 +1,131 @@
+## Module dependency analysis
+
+### Generating part dependencies
+
+Steps:
+
+1. Build the code to produce the intermediate files: run only the gn phase, with the check_deps flag enabled.
+
+   ```shell
+   ./build.sh --product-name Hi3516DV300 --ccache --gn-args pycache_enable=true --gn-args check_deps=true --build-only-gn
+   ```
+
+2. Run the dependency generation script.
+
+   ```shell
+   # For example, run from the source root:
+   build/tools/module_dependence/part_deps.py --deps-files-path out/ohos-arm-release/deps_files
+   ```
+
+3. Script options
+
+   ```
+   --deps-files-path # Required. Directory of module dependency data files; gn writes it to out/ohos-arm-release/deps_files. If the script is not run from the source root, point this at the correct path.
+   --graph           # Optional. Also generate the part dependency graph; requires the library described below.
+   ```
+
+4. Generating the dependency graph
+
+   To generate the part dependency graph, pass the --graph option.
+
+   The local machine needs the pyecharts library installed:
+
+   ```
+   # Install the Python 3 dependency with pip
+   pip3 install pyecharts
+
+   # If pip3 is not installed, install it first
+   sudo apt install python3-pip
+   ```
+
+5. Output
+
+   The script creates a parts_deps_info folder next to the deps_files directory given by `--deps-files-path` and writes the results there.
+
+   ```
+   parts_deps_info/all_deps_data.json   # module dependency data
+   parts_deps_info/part_deps_info.json  # part dependencies
+   parts_deps_info/part-deps-grahp.html # part dependency graph; generated only when --graph is passed
+   ```
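+
+   For orientation, each entry of all_deps_data.json is keyed by
+   "part:module" and carries the fields collected by dependence_analysis.py;
+   a sketch with illustrative names:
+
+   ```
+   {
+     "part_a:mod": {
+       "module_label": "//foundation/part_a/module:mod",
+       "part_name": "part_a",
+       "deps": [ "//foundation/part_a/utils:utils" ],
+       "external_deps": [ "part_b:lib" ]
+     }
+   }
+   ```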
+
+
+### Generating module dependencies
+
+Steps:
+
+1. Build the code to produce the intermediate files: run only the gn phase, with the check_deps flag enabled.
+
+   ```shell
+   ./build.sh --product-name Hi3516DV300 --ccache --gn-args pycache_enable=true --gn-args check_deps=true --build-only-gn
+   ```
+
+2. Run the dependency generation script.
+
+   ```shell
+   # For example, run from the source root:
+   build/tools/module_dependence/module_deps.py --deps-files-path out/ohos-arm-release/deps_files
+   ```
+
+3. Script options
+
+   ```
+   --deps-files-path # Required. Directory of module dependency data files; gn writes it to out/ohos-arm-release/deps_files. If the script is not run from the source root, point this at the correct path.
+   ```
+
+4. Output
+
+   The script creates a module_deps_info folder next to the deps_files directory given by `--deps-files-path` and writes the results there.
+
+   ```
+   module_deps_info/all_deps_data.json    # module dependency data
+   module_deps_info/module_deps_info.json # module dependencies
+   ```
+
+5. Generating the dependency tree of a single module
+
+   (1) This depends on the pyecharts package; install it first:
+
+   ```
+   pip3 install pyecharts
+   ```
+
+   (2) Command:
+
+   ```
+   # For example, run from the source root:
+   build/tools/module_dependence/module_deps_tree.py --module-name part_name:module_name --module-deps-file out/ohos-arm-release/module_deps_info/module_deps_info.json
+   ```
+
+   (3) Options:
+
+   ```
+   --module-name      # Required. Module to render, in the form "part_name:module_name".
+   --module-deps-file # Required. Path to the module dependency file module_deps_info.json.
+   ```
+
+   (4) Output:
+
+   The script writes the module dependency tree next to module_deps_info.json:
+
+   ```
+   module_deps_info/part_name__module_name.html
+   ```
+
+   Open this file in a browser to view the dependency tree; the first level of dependencies is expanded by default.
+
+   A solid dot marks a module whose dependencies are still collapsed; click it to expand them.
+
+   A module is shown in black the first time it appears and in red on subsequent appearances.
+
+**Note:**
+*This tool is still being improved; the scripts and usage steps may change at any time.*
diff --git a/dsoftbus/build/tools/module_dependence/dependence_analysis.py b/dsoftbus/build/tools/module_dependence/dependence_analysis.py
new file mode 100755
index 0000000000000000000000000000000000000000..f6a264c9824f9515b4df2b5a8f09bf89b5dcf973
--- /dev/null
+++ b/dsoftbus/build/tools/module_dependence/dependence_analysis.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import re
+import glob
+
+from file_utils import read_json_file
+
+
+def _get_external_deps_file_list(search_path):
+ search_str = "{}/**/*{}".format(search_path, "_external_deps_temp.json")
+ external_deps_file_list = glob.glob(search_str, recursive=True)
+ return external_deps_file_list
+
+
+def _read_external_deps_info(build_out_dir):
+ ext_deps_file_list = _get_external_deps_file_list(
+ os.path.join(build_out_dir, 'gen'))
+ ext_deps_file_dict = {}
+ for _external_deps_file in ext_deps_file_list:
+ if not os.path.exists(_external_deps_file):
+ raise Exception(
+ "file '{}' doesn't exist.".format(_external_deps_file))
+ module_ext_deps_info = read_json_file(_external_deps_file)
+ if module_ext_deps_info is None:
+ raise Exception(
+ "read file '{}' failed.".format(_external_deps_file))
+ _filename = os.path.basename(_external_deps_file)
+ _filename_snippet = re.search(r'(.*)_external_deps_temp.json',
+ _filename).group(1)
+ part_name, module_name = _parse_module_name(_filename_snippet)
+ module_alias = '{}:{}'.format(part_name, module_name)
+ ext_deps_file_dict[module_alias] = module_ext_deps_info
+ return ext_deps_file_dict
+
+
+def _parse_module_name(name_str):
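+    # File name snippets have the form '<part>__<module>', e.g. a hypothetical
+    # 'part_a__mod_x' splits into ('part_a', 'mod_x').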
+ split_result = name_str.split('__')
+ part_name = split_result[0]
+ module_name = split_result[1]
+ return part_name, module_name
+
+
+def _read_module_deps_info(module_deps_files_path):
+ deps_files = os.listdir(module_deps_files_path)
+ deps_data = {}
+ for _filename in deps_files:
+ _deps_file = os.path.join(module_deps_files_path, _filename)
+ if not os.path.exists(_deps_file):
+ raise Exception("file '{}' doesn't exist.".format(_deps_file))
+ module_deps_info = read_json_file(_deps_file)
+ if module_deps_info is None:
+ raise Exception("read file '{}' failed.".format(_deps_file))
+ _filename_snippet = re.search(r'(.*).json', _filename).group(1)
+ part_name, module_name = _parse_module_name(_filename_snippet)
+ module_alias = '{}:{}'.format(part_name, module_name)
+ deps_data[module_alias] = module_deps_info
+ return deps_data
+
+
+def _merge_external_deps_label(deps_data, external_deps_data):
+ for _module_alias, _info in deps_data.items():
+ external_deps = _info.get('external_deps')
+ if not external_deps:
+ continue
+ ext_deps_label_info = external_deps_data.get(_module_alias)
+ if not ext_deps_label_info:
+ raise Exception(
+ "module '{}' external deps info is incorrect.".format(
+ _module_alias))
+        _info['external_deps_label'] = ext_deps_label_info.get('deps')
+ return deps_data
+
+
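+# Entry point used by module_deps.py and part_deps.py: reads the per-module
+# deps files and merges in the external_deps label info from the build out dir.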
+def get_all_deps_data(module_deps_files_path):
+ deps_data = _read_module_deps_info(module_deps_files_path)
+ build_out_dir = os.path.dirname(module_deps_files_path)
+ external_deps_data = _read_external_deps_info(build_out_dir)
+ all_deps_data = _merge_external_deps_label(deps_data, external_deps_data)
+ return all_deps_data
diff --git a/dsoftbus/build/tools/module_dependence/file_utils.py b/dsoftbus/build/tools/module_dependence/file_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..78bc0ce2dbb3dfd132fcac4f0feb1145e31e880e
--- /dev/null
+++ b/dsoftbus/build/tools/module_dependence/file_utils.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import json
+
+
+def read_json_file(input_file):
+ if not os.path.exists(input_file):
+ print("file '{}' doesn't exist.".format(input_file))
+ return None
+
+ data = None
+ try:
+ with open(input_file, 'r') as input_f:
+ data = json.load(input_f)
+ except json.decoder.JSONDecodeError:
+ print("The file '{}' format is incorrect.".format(input_file))
+ raise
+ return data
+
+
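+# Create the parent directory if needed, then write content as indented JSON.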
+def write_json_file(output_file, content):
+ file_dir = os.path.dirname(os.path.abspath(output_file))
+ if not os.path.exists(file_dir):
+ os.makedirs(file_dir, exist_ok=True)
+ with open(output_file, 'w') as output_f:
+ json.dump(content, output_f, indent=2)
diff --git a/dsoftbus/build/tools/module_dependence/module_deps.py b/dsoftbus/build/tools/module_dependence/module_deps.py
new file mode 100755
index 0000000000000000000000000000000000000000..883efdddff99ca818c165f19e9fe16b1b66491a5
--- /dev/null
+++ b/dsoftbus/build/tools/module_dependence/module_deps.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+
+import file_utils
+import dependence_analysis
+
+
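+# Build a module-level dependency map ('part:module' -> list of deps).
+# Modules belonging to test parts (unittest/moduletest/systemtest) are
+# skipped; GN labels are translated back to module aliases, and
+# external_deps entries are appended as-is.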
+def gen_module_deps(deps_data):
+ part_allowlist = ['unittest', 'moduletest', 'systemtest']
+ label_to_alias = {}
+ for _module_alias, _info in deps_data.items():
+ _module_label = _info.get('module_label').split('(')[0]
+ label_to_alias[_module_label] = _module_alias
+
+    module_deps_data = {}
+ for _module_alias, _info in deps_data.items():
+ deps_module_list = []
+ _part_name = _info.get('part_name')
+ if _part_name in part_allowlist:
+ continue
+ _deps_label_list = _info.get('deps')
+ for _deps_label in _deps_label_list:
+ _alias = label_to_alias.get(_deps_label)
+ if _alias is None:
+ continue
+ deps_module_list.append(_alias)
+ _external_deps_list = _info.get('external_deps')
+ for _ext_deps in _external_deps_list:
+ deps_module_list.append(_ext_deps)
+
+        deps_value = module_deps_data.get(_module_alias, [])
+        deps_value.extend(deps_module_list)
+        module_deps_data[_module_alias] = list(set(deps_value))
+    return module_deps_data
+
+
+def run(deps_files_path, output_path):
+ all_deps_data = dependence_analysis.get_all_deps_data(deps_files_path)
+ all_deps_data_file = os.path.join(output_path, 'all_deps_data.json')
+ file_utils.write_json_file(all_deps_data_file, all_deps_data)
+
+ module_deps_data = gen_module_deps(all_deps_data)
+ module_deps_data_file = os.path.join(output_path, 'module_deps_info.json')
+ file_utils.write_json_file(module_deps_data_file, module_deps_data)
+
+
+def main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--deps-files-path', required=True)
+ args = parser.parse_args(argv)
+
+ if not os.path.exists(args.deps_files_path):
+ raise Exception("'{}' doesn't exist.".format(args.deps_files_path))
+ output_path = os.path.join(os.path.dirname(args.deps_files_path),
+ 'module_deps_info')
+ print("------Generate module dependency info------")
+ run(args.deps_files_path, output_path)
+ print('module deps data output to {}'.format(output_path))
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/tools/module_dependence/module_deps_tree.py b/dsoftbus/build/tools/module_dependence/module_deps_tree.py
new file mode 100755
index 0000000000000000000000000000000000000000..4267307b0db84607341159fb6dfd481fd4f780f9
--- /dev/null
+++ b/dsoftbus/build/tools/module_dependence/module_deps_tree.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import json
+import sys
+import argparse
+from pyecharts import options as opts
+from pyecharts.charts import Tree
+from pyecharts.globals import CurrentConfig
+
+
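+# Note: duplicates file_utils.read_json_file.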
+def read_json_file(input_file):
+ if not os.path.exists(input_file):
+ print("file '{}' doesn't exist.".format(input_file))
+ return None
+
+ data = None
+ try:
+ with open(input_file, 'r') as input_f:
+ data = json.load(input_f)
+ except json.decoder.JSONDecodeError:
+ print("The file '{}' format is incorrect.".format(input_file))
+ raise
+ return data
+
+
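+# Builds a pyecharts tree for one module's dependencies: the first occurrence
+# of a module is labeled black, repeat occurrences red (see the README).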
+class ModuleTree:
+ def __init__(self, data, inner_kits_adapter):
+ self.data = data
+ self.inner_kits_adapter = inner_kits_adapter
+ self.isroot = True
+ self.module_count = {}
+
+ def parse_part_module_name(self, node_name):
+ split_result = node_name.split(':')
+ part_name = split_result[0]
+ module_name = split_result[1]
+ return part_name, module_name
+
+    def get_label_color(self, node_name):
+        # First occurrence of a module gets a black label, repeats get red.
+        if node_name not in self.module_count:
+            self.module_count[node_name] = 1
+            color = '#000000'
+        else:
+            self.module_count[node_name] += 1
+            color = '#ff0000'
+        return opts.LabelOpts(color=color,
+                              font_style='normal',
+                              font_family='Times New Roman',
+                              font_size=16)
+
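+    # Recursively build the dependency subtree of node_name. If the node is
+    # unknown, try remapping its part name through inner_kits_adapter; a
+    # missing root is an error, a missing non-root only prints a warning.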
+ def node_tree(self, node_name):
+ if node_name not in self.data:
+ part_name, module_name = self.parse_part_module_name(node_name)
+ new_part_name = self.inner_kits_adapter.get(
+ part_name, "no_adapter")
+ old_node_name = node_name
+ if new_part_name != "no_adapter":
+ node_name = "{}:{}".format(new_part_name, module_name)
+ if node_name not in self.data:
+ if self.isroot:
+ raise Exception(
+ "module '{}' doesn't exist.".format(old_node_name))
+ else:
+                print("Warning: module '{}' doesn't exist.".format(
+                    old_node_name))
+ tree_data = {"name": node_name}
+ return tree_data
+
+ self.isroot = False
+ module_deps = self.data.get(node_name)
+ if len(module_deps) > 0:
+ children = []
+ for module_dep in module_deps:
+ children.append(self.node_tree(module_dep))
+ tree_data = {"name": node_name, "children": children}
+ else:
+ tree_data = {"name": node_name}
+ return tree_data
+
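+    # Breadth-first walk that assigns each node a label color via
+    # get_label_color.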
+    def color_tree(self, tree_data):
+        queue = [tree_data]
+        while queue:
+            node = queue.pop(0)
+            node["label_opt"] = self.get_label_color(node["name"])
+            queue.extend(node.get("children", []))
+        return tree_data
+
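+    # Convert the colored dict tree into nested opts.TreeItem objects.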
+    def graph_tree(self, tree_data):
+        children = tree_data.get("children")
+        if children:
+            children = [self.graph_tree(child) for child in children]
+            return opts.TreeItem(name=tree_data["name"],
+                                 label_opts=tree_data["label_opt"],
+                                 children=children)
+        return opts.TreeItem(name=tree_data["name"],
+                             label_opts=tree_data["label_opt"])
+
+ def get_node_tree(self, node_name):
+ self.isroot = True
+ tree_data = self.node_tree(node_name)
+ self.module_count = {}
+ tree_data = self.color_tree(tree_data)
+ tree = self.graph_tree(tree_data)
+ return tree
+
+
+def main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--module-name', required=True)
+ parser.add_argument('--module-deps-file', required=True)
+ args = parser.parse_args(argv)
+
+ node_name = args.module_name
+ inner_kits_adapter_file = os.path.join(
+ os.path.dirname(
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
+ "ohos", "inner_kits_adapter.json")
+ output_file = os.path.join(os.path.dirname(args.module_deps_file),
+ "{}.html".format(node_name.replace(":", "__")))
+ tree_title = "{} module dependency tree".format(node_name)
+ CurrentConfig.ONLINE_HOST = ("https://cdn.jsdelivr.net/npm/"
+ "echarts@latest/dist/")
+
+ print("------Generate module dependency tree------")
+ module_deps = read_json_file(args.module_deps_file)
+ inner_kits_adapter = read_json_file(inner_kits_adapter_file)
+
+ module_tree_ = ModuleTree(module_deps, inner_kits_adapter)
+ tree_data = module_tree_.get_node_tree(node_name)
+
+ tree = (Tree(opts.InitOpts(width="1920px", height="1080px")).add(
+ "", [tree_data],
+ orient="LR",
+ initial_tree_depth=1,
+ is_roam=True,
+ symbol_size=10).set_global_opts(title_opts=opts.TitleOpts(
+ title=tree_title)).render(output_file))
+ print('module deps tree output to {}'.format(output_file))
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/tools/module_dependence/part_deps.py b/dsoftbus/build/tools/module_dependence/part_deps.py
new file mode 100755
index 0000000000000000000000000000000000000000..181ded93f4acef1f80566a86500101fa2291a9fa
--- /dev/null
+++ b/dsoftbus/build/tools/module_dependence/part_deps.py
@@ -0,0 +1,146 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import os
+import argparse
+
+import file_utils
+import dependence_analysis
+
+
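+# Build a part-level dependency map by collapsing module deps to part names,
+# dropping self-dependencies and skipping the test parts in part_allowlist.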
+def gen_part_dependence(deps_data):
+ part_allowlist = ['unittest', 'moduletest', 'systemtest']
+ label_to_alias = {}
+ for _module_alias, _info in deps_data.items():
+ _module_label = _info.get('module_label').split('(')[0]
+ label_to_alias[_module_label] = _module_alias
+
+ part_deps_data = {}
+ for _module_alias, _info in deps_data.items():
+ deps_part_list = []
+ _part_name = _info.get('part_name')
+ if _part_name in part_allowlist:
+ continue
+ _deps_label_list = _info.get('deps')
+ for _deps_label in _deps_label_list:
+ _alias = label_to_alias.get(_deps_label)
+ if _alias is None:
+ continue
+ _dep_part_name = _alias.split(':')[0]
+ if _dep_part_name == _part_name:
+ continue
+ deps_part_list.append(_dep_part_name)
+ _external_deps_list = _info.get('external_deps')
+ for _ext_deps in _external_deps_list:
+ _dep_part_name = _ext_deps.split(':')[0]
+ if _dep_part_name == _part_name:
+ continue
+ deps_part_list.append(_dep_part_name)
+
+ deps_value = part_deps_data.get(_part_name, [])
+ deps_value.extend(deps_part_list)
+ part_deps_data[_part_name] = list(set(deps_value))
+ return part_deps_data
+
+
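+# Render the part dependency graph with pyecharts: a red edge marks a pair of
+# parts that depend on each other (a cycle), a black edge a one-way dependency.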
+def _drawing_part_deps(part_deps_data, output_path):
+ from pyecharts.charts import Graph
+ from pyecharts import options as opts
+ from pyecharts.globals import CurrentConfig
+
+ CurrentConfig.ONLINE_HOST = "https://cdn.jsdelivr.net/npm"\
+ "/echarts@latest/dist/"
+
+ part_allowlist = ['unittest', 'moduletest', 'systemtest']
+ part_nodes = []
+ parts_links = []
+ nodes_sets = set()
+ for _part_name, _dep_parts in part_deps_data.items():
+ if _part_name in part_allowlist:
+ continue
+ if _part_name not in nodes_sets:
+ nodes_sets.add(_part_name)
+ part_nodes.append(opts.GraphNode(
+ name=_part_name,
+ symbol='circle',
+ symbol_size=20,
+ label_opts=opts.LabelOpts(
+ font_style='normal',
+ font_family='Times New Roman',
+ font_size=16
+ )
+ ))
+ for _dep_part in _dep_parts:
+ if _part_name in part_deps_data.get(_dep_part, []):
+ parts_links.append(opts.GraphLink(
+ source=_part_name,
+ target=_dep_part,
+ linestyle_opts=opts.LineStyleOpts(
+ color='#ff0000', width=0.5)
+ ))
+ else:
+ parts_links.append(opts.GraphLink(
+ source=_part_name,
+ target=_dep_part,
+ linestyle_opts=opts.LineStyleOpts(
+ color='#000000', width=0.5)
+ ))
+    _output_graph_file = os.path.join(output_path, 'part-deps-graph.html')
+    (
+        Graph(opts.InitOpts(width="1920px", height="1080px"))
+        .add(
+            "",
+            part_nodes,
+            parts_links,
+            repulsion=800,
+            edge_symbol=['', 'arrow'],
+            edge_symbol_size=6
+        )
+        .set_global_opts(title_opts=opts.TitleOpts(title="part-deps-graph"))
+        .render(_output_graph_file)
+    )
+
+
+def run(deps_files_path, output_path, is_graph):
+ all_deps_data = dependence_analysis.get_all_deps_data(deps_files_path)
+ all_deps_data_file = os.path.join(output_path, 'all_deps_data.json')
+ file_utils.write_json_file(all_deps_data_file, all_deps_data)
+
+ part_deps_data = gen_part_dependence(all_deps_data)
+ part_deps_data_file = os.path.join(output_path, 'part_deps_info.json')
+ file_utils.write_json_file(part_deps_data_file, part_deps_data)
+ if is_graph:
+ _drawing_part_deps(part_deps_data, output_path)
+
+
+def main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--deps-files-path', required=True)
+ parser.add_argument('--graph', action='store_true')
+ parser.set_defaults(graph=False)
+ args = parser.parse_args(argv)
+
+ if not os.path.exists(args.deps_files_path):
+ raise Exception("'{}' doesn't exist.".format(args.deps_files_path))
+ output_path = os.path.join(os.path.dirname(
+ args.deps_files_path), 'part_deps_info')
+ print("------Generate part dependency info------")
+ run(args.deps_files_path, output_path, args.graph)
+ print('part deps data output to {}'.format(output_path))
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/dsoftbus/build/version.gni b/dsoftbus/build/version.gni
new file mode 100644
index 0000000000000000000000000000000000000000..b851573bb0c2f9a79d2394b29b9824813f9f4abd
--- /dev/null
+++ b/dsoftbus/build/version.gni
@@ -0,0 +1,33 @@
+# Copyright (c) 2021 Huawei Device Co., Ltd.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# HOSP version
+declare_args() {
+ hosp_version = "2.0"
+ sdk_version = "3.0.0.0"
+ api_version = "7"
+
+  # Release type. Possible values: Betax, RCx...
+ release_type = "Release"
+ meta_version = "3.0.0"
+}
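+
+# These defaults can be overridden on the gn command line, e.g.
+# (illustrative values):
+#   gn gen out/product --args='sdk_version="3.0.1.0" release_type="Beta1"'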
+
+# ohos SDK version
+declare_args() {
+ current_sdk_version = sdk_version
+}
+
+# ohos NDK version
+declare_args() {
+ current_ndk_version = current_sdk_version
+}
diff --git a/dsoftbus/build/zip.py b/dsoftbus/build/zip.py
new file mode 100755
index 0000000000000000000000000000000000000000..8038c9b380d1320bc65dbf0e7cb888bb5c3ec947
--- /dev/null
+++ b/dsoftbus/build/zip.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+Archives a set of files.
+"""
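+# --inputs is parsed with ast.literal_eval, so it must be a Python list
+# literal, e.g. (illustrative paths):
+#   python zip.py --inputs "['a.txt', 'sub/b.txt']" --output out.zip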
+
+import ast
+import optparse
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), 'scripts'))
+from util import build_utils
+
+
+def main():
+ parser = optparse.OptionParser()
+ build_utils.add_depfile_option(parser)
+
+ parser.add_option('--inputs', help='List of files to archive.')
+ parser.add_option('--output', help='Path to output archive.')
+    parser.add_option('--base-dir',
+                      help='If provided, the paths in the archive will be '
+                           'relative to this directory.',
+                      default='.')
+
+ options, _ = parser.parse_args()
+
+ inputs = ast.literal_eval(options.inputs)
+ output = options.output
+ base_dir = options.base_dir
+
+ with build_utils.atomic_output(output) as f:
+ build_utils.do_zip(inputs, f, base_dir)
+
+ if options.depfile:
+ build_utils.write_depfile(options.depfile, output)
+
+
+if __name__ == '__main__':
+ sys.exit(main())