From 884a21ddbee58fefa03bc0b8d348d306275b17e8 Mon Sep 17 00:00:00 2001 From: YijianCheng Date: Sat, 11 Jun 2022 18:55:55 +0800 Subject: [PATCH 01/30] add hetu-mpp connector --- hetu-docs/zh/connector/mpp.md | 70 ++++ hetu-mpp/pom.xml | 283 ++++++++++++++ .../io/hetu/core/plugin/mpp/MppConfig.java | 229 +++++++++++ .../core/plugin/mpp/MppConnectorFactory.java | 154 ++++++++ .../io/hetu/core/plugin/mpp/MppMetadata.java | 220 +++++++++++ .../core/plugin/mpp/MppMetadataFactory.java | 242 ++++++++++++ .../io/hetu/core/plugin/mpp/MppModule.java | 173 +++++++++ .../io/hetu/core/plugin/mpp/MppPlugin.java | 35 ++ .../hetu/core/plugin/mpp/MppSplitManager.java | 361 ++++++++++++++++++ .../core/plugin/mpp/RunningTaskHashMap.java | 70 ++++ .../core/plugin/mpp/SynchronizedHashMap.java | 256 +++++++++++++ .../hetu/core/plugin/mpp/TableMoveLock.java | 28 ++ .../core/plugin/mpp/scheduler/Scheduler.java | 121 ++++++ .../plugin/mpp/scheduler/db/GsussDBOpt.java | 150 ++++++++ .../mpp/scheduler/db/GsussDBOptThread.java | 149 ++++++++ .../plugin/mpp/scheduler/entity/ETLInfo.java | 64 ++++ .../mpp/scheduler/entity/TableSchema.java | 75 ++++ .../plugin/mpp/scheduler/hadoop/HiveOpt.java | 56 +++ .../plugin/mpp/scheduler/utils/Const.java | 42 ++ .../core/plugin/mpp/scheduler/utils/Util.java | 118 ++++++ hetu-server/src/main/provisio/hetu.xml | 6 + pom.xml | 7 + .../plugin/hive/HiveSplitLoader.java | 2 +- .../plugin/hive/HiveSplitManager.java | 2 +- .../plugin/hive/HiveSplitSource.java | 2 +- .../plugin/hive/HiveVacuumSplitSource.java | 4 +- presto-main/etc/catalog/mpp.properties | 30 ++ presto-main/etc/config.properties | 1 + 28 files changed, 2945 insertions(+), 5 deletions(-) create mode 100644 hetu-docs/zh/connector/mpp.md create mode 100644 hetu-mpp/pom.xml create mode 100644 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java create mode 100644 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConnectorFactory.java create mode 100644 
hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadata.java create mode 100644 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadataFactory.java create mode 100644 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppModule.java create mode 100644 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppPlugin.java create mode 100644 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppSplitManager.java create mode 100644 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java create mode 100644 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java create mode 100644 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java create mode 100755 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java create mode 100755 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java create mode 100755 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java create mode 100644 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java create mode 100644 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java create mode 100644 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java create mode 100755 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java create mode 100755 hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java create mode 100644 presto-main/etc/catalog/mpp.properties diff --git a/hetu-docs/zh/connector/mpp.md b/hetu-docs/zh/connector/mpp.md new file mode 100644 index 000000000..c7633f766 --- /dev/null +++ b/hetu-docs/zh/connector/mpp.md @@ -0,0 +1,70 @@ + +# mpp连接器 + +本方案旨在通过GDS高速协议来提高openLookeng引擎读取GaussDB的性能。 + +目前对于GaussDB数据库的数据查询方式是通过JDBC Connector来实现的,和大部分关系型数据库连接类似。由于传统的MySQL等数据库存储的数据量小,因此通过JDBC方式来获取数据无可厚非。但是由于GaussDB是一种MPP类的分布式数据库,存储数据量大,且主要用于OLAP场景的分析,导致通过原生JDBC方式拉取数据的方式变得低效,因为它是一种单进单出的模式。 + 
+后来,社区也针对这种情况进行了基于JDBC的优化,例如增加了下推,在引擎端增加了多split并发,但是依然无法避免数据源端的单并发,性能瓶颈依然无法得到完全的解决,而且多并发还会带来多连接的情况,对数据库集群造成一定的压力。 + +本方案将会通过解决数据源端和引擎端并发问题来提高引擎查询的效率。 + +# mpp连接器设计思路 + +本方案将mpp类数据库的查询转换成对hive外表的查询,即将mpp类数据库的表数据快速导出到外部的分布式文件系统或分布式缓存,然后通过挂hive外表的方式通过hive connector来进行查询。本方案的主要权衡点在于导出数据的效率和mpp数据库通过单CN节点对外输出数据的效率的比较。随着查询数据量的递增,本方案的效率提高也会越来越高。 +更详细的内容见社区分享:https://mp.weixin.qq.com/s/Q-t592UerICHNXI63rhtPg + +## 配置 + +要配置mpp连接器,在`etc/catalog`中创建一个目录属性文件,例如`mpp.properties`,使用以下内容创建文件,并根据设置替换连接属性: + +``` properties +本方案本质上是将查询gaussdb数据库转换成查询hive,因此以下配置均基于此原理。 + +connector.name=mpp +# 配置用来做最后查询的hive仓库 +hive.metastore.uri=thrift://localhost:9083 + +etl-reuse=false #是否复用本次导数结果 + +#GDS baseinfo +#gds进程,基于postgres的fdw机制实现的一个快速导数进程,gaussdb官方插件 +gds-list=gsfs://localhost:port1|base_path #gds的ip和端口,以及该进程启动时候的basepath,多个gds进程可以通过逗号分隔 +aux-url=alluxio://localhost:19998 #alluxio的ip和端口 +base-aux=/gdsdata/ #alluxio中用来为gds导出数据服务的路径,可自定义 + +#hive info +# 用来进行创建外表等操作的hive仓库连接配置 +hive-user=username +hive-passwd=password +hive-db=xxx_db +hive-url=jdbc:hive2://localhost:10000/ + +# hive template +## 进行hive外表创建的相关SQL模板,一般无需更改 +hsql-drop=drop table if exists ${table_name} +hsql-create=CREATE EXTERNAL TABLE ${table_name} ( ${schema_info} ) COMMENT 'gds external table' ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' LOCATION '${pipe_to_aux_base_path}' + +# gsDB connection info +# 要查询的GaussDB数据库相关连接信息 +gs-driver=org.postgresql.Driver +gs-url=jdbc:postgresql://localhost:25308/schema +gs-user=user +gs-passwd=password + +# gaussdb template +# 利用gds触发导数的相SQL模板,一般无需更改 +gsql-create=create foreign table ${gaussdb_name}.ext_${table_name} ( ${schema_info} ) SERVER gsmpp_server OPTIONS ( LOCATION '${gds_foreign_location}', FORMAT 'text', DELIMITER E',', NULL '', encoding 'UTF-8', noescaping 'true', EOL E'\\n', out_filename_prefix '${table_name}') WRITE ONLY; +gsql-insert=insert into ${gaussdb_name}.ext_${table_name} select ${schema_info} from ${gaussdb_name}.${table_name}; 
+gsql-drop=drop foreign table if exists ${gaussdb_name}.ext_${table_name}; + +``` + +## 使用 + +mpp连接器会将查询转换为对所配置的gaussdb的查询,因此如果你要查询opengauss.testdb.usertbl,如果想通过mpp +connector进行快速查询,则可以写成: + + select * from mpp.testdb.usertbl; + +如果您对mpp connector有更多的需求和见解,欢迎提issue和pr。 \ No newline at end of file diff --git a/hetu-mpp/pom.xml b/hetu-mpp/pom.xml new file mode 100644 index 000000000..e5549a885 --- /dev/null +++ b/hetu-mpp/pom.xml @@ -0,0 +1,283 @@ + + +4.0.0 + + + io.hetu.core + presto-root + 1.7.0-SNAPSHOT + + +hetu-mpp +hetu - Mpp Connector +hetu-plugin + + + ${project.parent.basedir} + + + + + io.hetu.core + presto-hive + + + + io.airlift + json + + + + io.airlift + configuration + + + + io.airlift + bootstrap + + + + org.weakref + jmxutils + + + + io.airlift + event + + + + io.hetu.core + presto-plugin-toolkit + + + + io.airlift + log + + + + io.hetu.core + presto-orc + + + + com.google.guava + guava + + + + com.google.inject + guice + + + + javax.inject + javax.inject + + + + com.google.code.findbugs + jsr305 + true + + + + io.airlift + concurrent + + + + io.airlift + stats + + + + + io.hetu.core + presto-spi + provided + + + + io.airlift + slice + provided + + + + io.airlift + units + provided + + + + com.fasterxml.jackson.core + jackson-annotations + provided + + + + org.openjdk.jol + jol-core + provided + + + + + + + + + + + net.jodah + expiringmap + 0.5.9 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + io.prestosql.hadoop + hadoop-apache + runtime + + + + commons-io + commons-io + runtime + + + + + org.testng + testng + test + + + + io.airlift + testing + test + + + + org.assertj + assertj-core + test + + + + io.hetu.core + presto-hive + test-jar + test + + + + io.prestosql.hive + hive-apache + + + + io.hetu.core + presto-main + test + + + + + + default + + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + + + **/TestHive.java + **/TestHiveFileSystemS3.java + **/TestHiveFileSystemS3SelectPushdown.java + + + + + + 
+ + test-mpp + + + + org.apache.maven.plugins + maven-surefire-plugin + + + **/TestHive.java + + + + + + + + test-mpp-s3 + + + + org.apache.maven.plugins + maven-surefire-plugin + + + **/TestHiveFileSystemS3.java + **/TestHiveFileSystemS3SelectPushdown.java + + + + + + + + \ No newline at end of file diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java new file mode 100644 index 000000000..901e5f578 --- /dev/null +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java @@ -0,0 +1,229 @@ +/* + * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.mpp; + +import io.airlift.configuration.Config; + +public class MppConfig +{ + private String gdsList; + private boolean etlReuse; + private String baseAux; + private String auxUrl; + + private String hiveDb; + private String hiveUrl; + private String hiveUser; + private String hivePasswd; + private String hsqlDrop; + private String hsqlCreate; + + private String gsDriver; + private String gsUrl; + private String gsUser; + private String gsPasswd; + + private String gsqlCreate; + private String gsqlInsert; + private String gsqlDrop; + + public String getGdsList() + { + return gdsList; + } + + @Config("gds-list") + public MppConfig setGdsList(String gdsList) + { + this.gdsList = gdsList; + return this; + } + + public boolean isEtlReuse() + { + return etlReuse; + } + + @Config("etl-reuse") + public void setEtlReuse(boolean etlReuse) + { + this.etlReuse = etlReuse; + } + + public String getGsDriver() + { + return gsDriver; + } + + @Config("gs-driver") + public void setGsDriver(String gsDriver) + { + this.gsDriver = gsDriver; + } + + public String getGsUrl() + { + return gsUrl; + } + + @Config("gs-url") + public void setGsUrl(String gsUrl) + { + this.gsUrl = gsUrl; + } + + public String getGsUser() + { + return gsUser; + } + + @Config("gs-user") + public void setGsUser(String gsUser) + { + this.gsUser = gsUser; + } + + public String getGsPasswd() + { + return gsPasswd; + } + + @Config("gs-passwd") + public void setGsPasswd(String gsPasswd) + { + this.gsPasswd = gsPasswd; + } + + public String getGsqlCreate() + { + return gsqlCreate; + } + + @Config("gsql-create") + public void setGsqlCreate(String gsqlCreate) + { + this.gsqlCreate = gsqlCreate; + } + + public String getGsqlInsert() + { + return gsqlInsert; + } + + @Config("gsql-insert") + public void setGsqlInsert(String gsqlInsert) + { + this.gsqlInsert = gsqlInsert; + } + + public String getGsqlDrop() + { + return gsqlDrop; + } + + @Config("gsql-drop") + public void 
setGsqlDrop(String gsqlDrop) + { + this.gsqlDrop = gsqlDrop; + } + + public String getBaseAux() + { + return baseAux; + } + + @Config("base-aux") + public void setBaseAux(String baseAux) + { + this.baseAux = baseAux; + } + + public String getAuxUrl() + { + return auxUrl; + } + + @Config("aux-url") + public void setAuxUrl(String auxUrl) + { + this.auxUrl = auxUrl; + } + + public String getHiveDb() + { + return hiveDb; + } + + @Config("hive-db") + public void setHiveDb(String hiveDb) + { + this.hiveDb = hiveDb; + } + + public String getHiveUrl() + { + return hiveUrl; + } + + @Config("hive-url") + public void setHiveUrl(String hiveUrl) + { + this.hiveUrl = hiveUrl; + } + + public String getHiveUser() + { + return hiveUser; + } + + @Config("hive-user") + public void setHiveUser(String hiveUser) + { + this.hiveUser = hiveUser; + } + + public String getHivePasswd() + { + return hivePasswd; + } + + @Config("hive-passwd") + public void setHivePasswd(String hivePasswd) + { + this.hivePasswd = hivePasswd; + } + + public String getHsqlDrop() + { + return hsqlDrop; + } + + @Config("hsql-drop") + public void setHsqlDrop(String hsqlDrop) + { + this.hsqlDrop = hsqlDrop; + } + + public String getHsqlCreate() + { + return hsqlCreate; + } + + @Config("hsql-create") + public void setHsqlCreate(String hsqlCreate) + { + this.hsqlCreate = hsqlCreate; + } +} diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConnectorFactory.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConnectorFactory.java new file mode 100644 index 000000000..6d1435785 --- /dev/null +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConnectorFactory.java @@ -0,0 +1,154 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.mpp; + +import com.google.common.collect.ImmutableSet; +import com.google.inject.Injector; +import com.google.inject.Key; +import com.google.inject.TypeLiteral; +import io.airlift.bootstrap.Bootstrap; +import io.airlift.bootstrap.LifeCycleManager; +import io.airlift.event.client.EventModule; +import io.airlift.json.JsonModule; +import io.prestosql.plugin.base.jmx.MBeanServerModule; +import io.prestosql.plugin.hive.ConnectorObjectNameGeneratorModule; +import io.prestosql.plugin.hive.HiveAnalyzeProperties; +import io.prestosql.plugin.hive.HiveCatalogName; +import io.prestosql.plugin.hive.HiveConnector; +import io.prestosql.plugin.hive.HiveConnectorFactory; +import io.prestosql.plugin.hive.HiveMetadataFactory; +import io.prestosql.plugin.hive.HiveProcedureModule; +import io.prestosql.plugin.hive.HiveSchemaProperties; +import io.prestosql.plugin.hive.HiveSessionProperties; +import io.prestosql.plugin.hive.HiveTableProperties; +import io.prestosql.plugin.hive.HiveTransactionManager; +import io.prestosql.plugin.hive.NodeVersion; +import io.prestosql.plugin.hive.authentication.HiveAuthenticationModule; +import io.prestosql.plugin.hive.gcs.HiveGcsModule; +import io.prestosql.plugin.hive.metastore.HiveMetastore; +import io.prestosql.plugin.hive.metastore.HiveMetastoreModule; +import io.prestosql.plugin.hive.s3.HiveS3Module; +import io.prestosql.plugin.hive.security.HiveSecurityModule; +import io.prestosql.plugin.hive.security.SystemTableAwareAccessControl; +import io.prestosql.spi.NodeManager; +import 
io.prestosql.spi.PageIndexerFactory; +import io.prestosql.spi.PageSorter; +import io.prestosql.spi.VersionEmbedder; +import io.prestosql.spi.classloader.ThreadContextClassLoader; +import io.prestosql.spi.connector.Connector; +import io.prestosql.spi.connector.ConnectorAccessControl; +import io.prestosql.spi.connector.ConnectorContext; +import io.prestosql.spi.connector.ConnectorNodePartitioningProvider; +import io.prestosql.spi.connector.ConnectorPageSinkProvider; +import io.prestosql.spi.connector.ConnectorPageSourceProvider; +import io.prestosql.spi.connector.ConnectorSplitManager; +import io.prestosql.spi.connector.classloader.ClassLoaderSafeConnectorPageSinkProvider; +import io.prestosql.spi.connector.classloader.ClassLoaderSafeConnectorPageSourceProvider; +import io.prestosql.spi.connector.classloader.ClassLoaderSafeConnectorSplitManager; +import io.prestosql.spi.connector.classloader.ClassLoaderSafeNodePartitioningProvider; +import io.prestosql.spi.heuristicindex.IndexClient; +import io.prestosql.spi.procedure.Procedure; +import io.prestosql.spi.type.TypeManager; +import org.weakref.jmx.guice.MBeanModule; + +import java.util.Map; +import java.util.Optional; +import java.util.Set; + +import static com.google.common.base.Throwables.throwIfUnchecked; +import static java.util.Objects.requireNonNull; + +public class MppConnectorFactory + extends HiveConnectorFactory +{ + private final ClassLoader classLoader; + + public MppConnectorFactory(String name, ClassLoader classLoader, Optional metastore) + { + super(name, classLoader, metastore); + this.classLoader = requireNonNull(classLoader, "classLoader is null"); + } + + @Override + public Connector create(String catalogName, Map config, ConnectorContext context) + { + requireNonNull(config, "config is null"); + + try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) { + Bootstrap app = new Bootstrap( + new EventModule(), + new MBeanModule(), + new 
ConnectorObjectNameGeneratorModule(catalogName), + new JsonModule(), + new MppModule(), + new HiveS3Module(), + new HiveGcsModule(), + new HiveMetastoreModule(Optional.empty()), + new HiveSecurityModule(), + new HiveAuthenticationModule(), + new HiveProcedureModule(), + new MBeanServerModule(), + binder -> { + binder.bind(NodeVersion.class).toInstance(new NodeVersion(context.getNodeManager().getCurrentNode().getVersion())); + binder.bind(NodeManager.class).toInstance(context.getNodeManager()); + binder.bind(VersionEmbedder.class).toInstance(context.getVersionEmbedder()); + binder.bind(TypeManager.class).toInstance(context.getTypeManager()); + binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory()); + binder.bind(PageSorter.class).toInstance(context.getPageSorter()); + binder.bind(HiveCatalogName.class).toInstance(new HiveCatalogName(catalogName)); + binder.bind(IndexClient.class).toInstance(context.getIndexClient()); + }); + + Injector injector = app + .strictConfig() + .doNotInitializeLogging() + .setRequiredConfigurationProperties(config) + .initialize(); + + LifeCycleManager lifeCycleManager = injector.getInstance(LifeCycleManager.class); + HiveMetadataFactory metadataFactory = injector.getInstance(HiveMetadataFactory.class); + HiveTransactionManager transactionManager = injector.getInstance(HiveTransactionManager.class); + ConnectorSplitManager splitManager = injector.getInstance(ConnectorSplitManager.class); + ConnectorPageSourceProvider connectorPageSource = injector.getInstance(ConnectorPageSourceProvider.class); + ConnectorPageSinkProvider pageSinkProvider = injector.getInstance(ConnectorPageSinkProvider.class); + ConnectorNodePartitioningProvider connectorDistributionProvider = injector.getInstance(ConnectorNodePartitioningProvider.class); + HiveSessionProperties hiveSessionProperties = injector.getInstance(HiveSessionProperties.class); + HiveTableProperties hiveTableProperties = injector.getInstance(HiveTableProperties.class); + 
HiveAnalyzeProperties hiveAnalyzeProperties = injector.getInstance(HiveAnalyzeProperties.class); + ConnectorAccessControl accessControl = new SystemTableAwareAccessControl(injector.getInstance(ConnectorAccessControl.class)); + Set procedures = injector.getInstance(Key.get(new TypeLiteral>() {})); + + return new HiveConnector( + lifeCycleManager, + metadataFactory, + transactionManager, + new ClassLoaderSafeConnectorSplitManager(splitManager, classLoader), + new ClassLoaderSafeConnectorPageSourceProvider(connectorPageSource, classLoader), + new ClassLoaderSafeConnectorPageSinkProvider(pageSinkProvider, classLoader), + new ClassLoaderSafeNodePartitioningProvider(connectorDistributionProvider, classLoader), + ImmutableSet.of(), + procedures, + hiveSessionProperties.getSessionProperties(), + HiveSchemaProperties.SCHEMA_PROPERTIES, + hiveTableProperties.getTableProperties(), + hiveAnalyzeProperties.getAnalyzeProperties(), + accessControl, + classLoader); + } + catch (Exception e) { + throwIfUnchecked(e); + throw new RuntimeException(e); + } + } +} diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadata.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadata.java new file mode 100644 index 000000000..04b5aa155 --- /dev/null +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadata.java @@ -0,0 +1,220 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.mpp; + +import com.google.common.collect.ImmutableList; +import com.google.inject.Inject; +import io.airlift.json.JsonCodec; +import io.airlift.log.Logger; +import io.airlift.units.Duration; +import io.hetu.core.plugin.mpp.scheduler.Scheduler; +import io.hetu.core.plugin.mpp.scheduler.db.GsussDBOpt; +import io.hetu.core.plugin.mpp.scheduler.entity.ETLInfo; +import io.hetu.core.plugin.mpp.scheduler.utils.Const; +import io.hetu.core.plugin.mpp.scheduler.utils.Util; +import io.prestosql.plugin.hive.HdfsEnvironment; +import io.prestosql.plugin.hive.HiveBucketing; +import io.prestosql.plugin.hive.HiveErrorCode; +import io.prestosql.plugin.hive.HiveMetadata; +import io.prestosql.plugin.hive.HivePartitionManager; +import io.prestosql.plugin.hive.HiveTableHandle; +import io.prestosql.plugin.hive.LocationService; +import io.prestosql.plugin.hive.PartitionUpdate; +import io.prestosql.plugin.hive.TypeTranslator; +import io.prestosql.plugin.hive.authentication.HiveIdentity; +import io.prestosql.plugin.hive.metastore.MetastoreUtil; +import io.prestosql.plugin.hive.metastore.SemiTransactionalHiveMetastore; +import io.prestosql.plugin.hive.metastore.Table; +import io.prestosql.plugin.hive.security.AccessControlMetadata; +import io.prestosql.plugin.hive.statistics.HiveStatisticsProvider; +import io.prestosql.spi.PrestoException; +import io.prestosql.spi.connector.ConnectorSession; +import io.prestosql.spi.connector.SchemaTableName; +import io.prestosql.spi.type.TypeManager; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.ScheduledExecutorService; + +import static io.prestosql.plugin.hive.HiveUtil.getPartitionKeyColumnHandles; +import static java.util.Collections.emptyList; +import static java.util.Objects.requireNonNull; + +public class MppMetadata + extends HiveMetadata +{ + public Scheduler scheduler; + public MppConfig mppConfig; + public boolean 
createsOfNonManagedTablesEnabled; + public static Logger logger = Logger.get(MppMetadata.class); + + @Inject + public MppMetadata( + SemiTransactionalHiveMetastore metastore, + HdfsEnvironment hdfsEnvironment, + HivePartitionManager partitionManager, + boolean writesToNonManagedTablesEnabled, + boolean createsOfNonManagedTablesEnabled, + boolean tableCreatesWithLocationAllowed, + TypeManager typeManager, + LocationService locationService, + JsonCodec partitionUpdateCodec, + TypeTranslator typeTranslator, + String prestoVersion, + HiveStatisticsProvider hiveStatisticsProvider, + AccessControlMetadata accessControlMetadata, + boolean autoVacuumEnabled, + int vacuumDeltaNumThreshold, + double vacuumDeltaPercentThreshold, + ScheduledExecutorService vacuumExecutorService, + Optional vacuumCollectorInterval, + ScheduledExecutorService hiveMetastoreClientService, + Scheduler scheduler, + MppConfig mppConfig) + { + super(metastore, hdfsEnvironment, partitionManager, writesToNonManagedTablesEnabled, + createsOfNonManagedTablesEnabled, tableCreatesWithLocationAllowed, + typeManager, locationService, partitionUpdateCodec, typeTranslator, + prestoVersion, hiveStatisticsProvider, accessControlMetadata, + autoVacuumEnabled, vacuumDeltaNumThreshold, vacuumDeltaPercentThreshold, + vacuumExecutorService, vacuumCollectorInterval, hiveMetastoreClientService); + this.scheduler = scheduler; + this.mppConfig = mppConfig; + this.createsOfNonManagedTablesEnabled = createsOfNonManagedTablesEnabled; + } + + @Override + public List listSchemaNames(ConnectorSession session) + { + List dbList = metastore.getAllDatabases(); + return dbList; + } + + @Override + public List listTables(ConnectorSession session, Optional optionalSchemaName) + { + ImmutableList.Builder tableNames = ImmutableList.builder(); + for (String schemaName : listSchemas(session, optionalSchemaName)) { + for (String tableName : metastore.getAllTables(schemaName).orElse(emptyList())) { + tableNames.add(new 
SchemaTableName(schemaName, tableName)); + } + } + return tableNames.build(); + } + + private List listSchemas(ConnectorSession session, Optional schemaName) + { + if (schemaName.isPresent()) { + return ImmutableList.of(schemaName.get()); + } + return listSchemaNames(session); + } + + @Override + public HiveTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName) + { + requireNonNull(tableName, "tableName is null"); + String gsSchemaName = tableName.getSchemaName(); + String schemaName = mppConfig.getHiveDb(); + String tblName = tableName.getTableName(); + String threadName = Const.tableStatus.getThreadName(); +// Optional table = metastore.getTable(new HiveIdentity(session), tableName.getSchemaName(), tableName.getTableName()); + Optional
table = metastore.getTable(new HiveIdentity(session), schemaName, tableName.getTableName()); + String tblIdentifier = schemaName + "." + tblName; + logger.info("Mpp scheduler for " + tblIdentifier + " started"); + if (!table.isPresent()) { + logger.info("Hive(Mpp) table " + tblIdentifier + " is not present"); + determineWhetherToETL(gsSchemaName, schemaName, tblName, threadName, tblIdentifier); + } + else { + if (Const.etlInfoMap.containsKey(tblIdentifier) && Const.tableStatus.tableStatusKeysExists(tblIdentifier)) { +// have etled at least onece + if (mppConfig.isEtlReuse()) { +// scheduler.prepare(gdsServer, schemas, schemaName, tblName, false); + logger.info("Hive(Mpp) table " + tblIdentifier + " existed and reuse it"); + } + else { + logger.info("Hive(Mpp) " + tblIdentifier + " existed in this app runtime but not reuse it and will redo!"); + determineWhetherToETL(gsSchemaName, schemaName, tblName, threadName, tblIdentifier); + } + } + else { +// have etled in last restart, +// or have etled and still etling +// or just create and etling +// we can use two strategies to judge: rules and time interval +// ETLInfo etlInfo = Const.etlInfoMap.get(tblIdentifier); + if (Const.tableStatus.tableStatusKeysExists(tblIdentifier)) { +// scheduler.prepare(gdsServer, schemas, schemaName, tblName, false); + logger.info("[2]Hive(MPP) " + tblIdentifier + " Table is existed and is etling by others and reuse it!"); + } + else { + logger.info("[2]Hive(MPP) " + tblIdentifier + " existed in last app runtime but not reuse it and will redo!"); + determineWhetherToETL(gsSchemaName, schemaName, tblName, threadName, tblIdentifier); + } + } + } + +// table = metastore.getTable(new HiveIdentity(session), tableName.getSchemaName(), tableName.getTableName()); + table = metastore.getTable(new HiveIdentity(session), schemaName, tableName.getTableName()); + + // we must not allow system tables due to how permissions are checked in SystemTableAwareAccessControl + if 
(getSourceTableNameFromSystemTable(tableName).isPresent()) { + throw new PrestoException(HiveErrorCode.HIVE_INVALID_METADATA, "Unexpected table present in Hive metastore: " + tableName); + } + + MetastoreUtil.verifyOnline(tableName, Optional.empty(), MetastoreUtil.getProtectMode(table.get()), table.get().getParameters()); + + Map parameters = new HashMap<>(); + parameters.putAll(table.get().getParameters()); + + String format = table.get().getStorage().getStorageFormat().getOutputFormatNullable(); + if (format != null) { + parameters.put(STORAGE_FORMAT, format); + } + + return new HiveTableHandle( + schemaName, + tableName.getTableName(), + parameters, + getPartitionKeyColumnHandles(table.get()), + HiveBucketing.getHiveBucketHandle(table.get())); + } + + private void determineWhetherToETL(String gsSchemaName, String schemaName, String tblName, String threadName, String tblIdentifier) + { + List runningTaskList = Const.runningThreadMap.get(tblIdentifier); + Const.tableStatus.put(tblIdentifier, 0); + Const.etlInfoMap.put(tblIdentifier, new ETLInfo(0, Util.getDate(), "")); + int size; + String lock = TableMoveLock.getLock(tblIdentifier); + Map schemas = null; + synchronized (lock) { + runningTaskList.add(threadName); + Const.runningThreadMap.put(tblIdentifier, runningTaskList); + size = Const.runningThreadMap.get(tblIdentifier).size(); + if (size == 1) { + schemas = GsussDBOpt.getSchemas(mppConfig, "", gsSchemaName, tblName); + logger.info("Record table " + tblIdentifier + " into tableStatus and eltlInfoMap"); + scheduler.prepareHiveExternalTable(schemas, schemaName, tblName); + } + } + if (size == 1) { + Map.Entry gdsServer = scheduler.getGDS(); + scheduler.startGdsProcess(gdsServer, schemas, gsSchemaName, tblName); + } + } +} diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadataFactory.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadataFactory.java new file mode 100644 index 000000000..b59bb24fb --- /dev/null +++ 
b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadataFactory.java @@ -0,0 +1,242 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.mpp; + +import io.airlift.concurrent.BoundedExecutor; +import io.airlift.json.JsonCodec; +import io.airlift.units.Duration; +import io.hetu.core.plugin.mpp.scheduler.Scheduler; +import io.prestosql.plugin.hive.ForHive; +import io.prestosql.plugin.hive.ForHiveMetastore; +import io.prestosql.plugin.hive.ForHiveTransactionHeartbeats; +import io.prestosql.plugin.hive.ForHiveVacuum; +import io.prestosql.plugin.hive.HdfsEnvironment; +import io.prestosql.plugin.hive.HiveConfig; +import io.prestosql.plugin.hive.HiveMetadata; +import io.prestosql.plugin.hive.HiveMetadataFactory; +import io.prestosql.plugin.hive.HivePartitionManager; +import io.prestosql.plugin.hive.LocationService; +import io.prestosql.plugin.hive.NodeVersion; +import io.prestosql.plugin.hive.PartitionUpdate; +import io.prestosql.plugin.hive.TypeTranslator; +import io.prestosql.plugin.hive.metastore.CachingHiveMetastore; +import io.prestosql.plugin.hive.metastore.HiveMetastore; +import io.prestosql.plugin.hive.metastore.SemiTransactionalHiveMetastore; +import io.prestosql.plugin.hive.security.AccessControlMetadataFactory; +import io.prestosql.plugin.hive.statistics.MetastoreHiveStatisticsProvider; +import io.prestosql.spi.type.TypeManager; + +import javax.inject.Inject; + +import java.util.Optional; +import 
java.util.concurrent.ExecutorService; +import java.util.concurrent.ScheduledExecutorService; + +import static java.util.Objects.requireNonNull; + +public class MppMetadataFactory + extends HiveMetadataFactory +{ + private final boolean skipDeletionForAlter; + private final boolean skipTargetCleanupOnRollback; + private final boolean writesToNonManagedTablesEnabled; + private final boolean createsOfNonManagedTablesEnabled; + private final boolean tableCreatesWithLocationAllowed; + private final long perTransactionCacheMaximumSize; + private final HiveMetastore metastore; + private final HdfsEnvironment hdfsEnvironment; + private final HivePartitionManager partitionManager; + private final TypeManager typeManager; + private final LocationService locationService; + private final JsonCodec partitionUpdateCodec; + private final BoundedExecutor renameExecution; + private final ScheduledExecutorService hiveVacuumService; + private final TypeTranslator typeTranslator; + private final String prestoVersion; + private final AccessControlMetadataFactory accessControlMetadataFactory; + private final Optional hiveTransactionHeartbeatInterval; + private final ScheduledExecutorService heartbeatService; + private final ScheduledExecutorService hiveMetastoreClientService; + private final Duration vacuumCleanupRecheckInterval; + private final int vacuumDeltaNumThreshold; + private final double vacuumDeltaPercentThreshold; + private final boolean autoVacuumEnabled; + private Optional vacuumCollectorInterval; + protected final int hmsWriteBatchSize; + private Scheduler scheduler; + private MppConfig mppConfig; + + @Inject + @SuppressWarnings("deprecation") + public MppMetadataFactory( + HiveConfig hiveConfig, + HiveMetastore metastore, + HdfsEnvironment hdfsEnvironment, + HivePartitionManager partitionManager, + @ForHive ExecutorService executorService, + @ForHiveVacuum ScheduledExecutorService hiveVacuumService, + @ForHiveMetastore ScheduledExecutorService hiveMetastoreClientService, 
+ @ForHiveTransactionHeartbeats ScheduledExecutorService heartbeatService, + TypeManager typeManager, + LocationService locationService, + JsonCodec partitionUpdateCodec, + TypeTranslator typeTranslator, + NodeVersion nodeVersion, + AccessControlMetadataFactory accessControlMetadataFactory, + Scheduler scheduler, + MppConfig mppConfig) + { + this( + metastore, + hdfsEnvironment, + partitionManager, + hiveConfig.getMaxConcurrentFileRenames(), + hiveConfig.isSkipDeletionForAlter(), + hiveConfig.isSkipTargetCleanupOnRollback(), + hiveConfig.getWritesToNonManagedTablesEnabled(), + hiveConfig.getCreatesOfNonManagedTablesEnabled(), + hiveConfig.getTableCreatesWithLocationAllowed(), + hiveConfig.getPerTransactionMetastoreCacheMaximumSize(), + hiveConfig.getHiveTransactionHeartbeatInterval(), + hiveConfig.getVacuumCleanupRecheckInterval(), + typeManager, + locationService, + partitionUpdateCodec, + executorService, + hiveVacuumService, + heartbeatService, + hiveMetastoreClientService, + typeTranslator, + nodeVersion.toString(), + accessControlMetadataFactory, + hiveConfig.getVacuumDeltaNumThreshold(), + hiveConfig.getVacuumDeltaPercentThreshold(), + hiveConfig.getAutoVacuumEnabled(), + hiveConfig.getVacuumCollectorInterval(), + hiveConfig.getMetastoreWriteBatchSize(), + scheduler, + mppConfig); + } + + public MppMetadataFactory( + HiveMetastore metastore, + HdfsEnvironment hdfsEnvironment, + HivePartitionManager partitionManager, + int maxConcurrentFileRenames, + boolean skipDeletionForAlter, + boolean skipTargetCleanupOnRollback, + boolean writesToNonManagedTablesEnabled, + boolean createsOfNonManagedTablesEnabled, + boolean tableCreatesWithLocationAllowed, + long perTransactionCacheMaximumSize, + Optional hiveTransactionHeartbeatInterval, + Duration vacuumCleanupRecheckInterval, + TypeManager typeManager, + LocationService locationService, + JsonCodec partitionUpdateCodec, + ExecutorService executorService, + ScheduledExecutorService hiveVacuumService, + 
ScheduledExecutorService heartbeatService, + ScheduledExecutorService hiveMetastoreClientService, + TypeTranslator typeTranslator, + String prestoVersion, + AccessControlMetadataFactory accessControlMetadataFactory, + int vacuumDeltaNumThreshold, + double vacuumDeltaPercentThreshold, + boolean autoVacuumEnabled, + Optional vacuumCollectorInterval, + int hmsWriteBatchSize, + Scheduler scheduler, + MppConfig mppConfig) + { + super(metastore, hdfsEnvironment, partitionManager, maxConcurrentFileRenames, + skipDeletionForAlter, skipTargetCleanupOnRollback, writesToNonManagedTablesEnabled, + createsOfNonManagedTablesEnabled, tableCreatesWithLocationAllowed, perTransactionCacheMaximumSize, + hiveTransactionHeartbeatInterval, vacuumCleanupRecheckInterval, typeManager, locationService, + partitionUpdateCodec, executorService, hiveVacuumService, heartbeatService, + hiveMetastoreClientService, typeTranslator, prestoVersion, accessControlMetadataFactory, + vacuumDeltaNumThreshold, vacuumDeltaPercentThreshold, autoVacuumEnabled, vacuumCollectorInterval, hmsWriteBatchSize); + + this.skipDeletionForAlter = skipDeletionForAlter; + this.skipTargetCleanupOnRollback = skipTargetCleanupOnRollback; + this.writesToNonManagedTablesEnabled = writesToNonManagedTablesEnabled; + this.createsOfNonManagedTablesEnabled = createsOfNonManagedTablesEnabled; + this.tableCreatesWithLocationAllowed = tableCreatesWithLocationAllowed; + this.perTransactionCacheMaximumSize = perTransactionCacheMaximumSize; + + this.metastore = requireNonNull(metastore, "metastore is null"); + this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null"); + this.partitionManager = requireNonNull(partitionManager, "partitionManager is null"); + this.typeManager = requireNonNull(typeManager, "typeManager is null"); + this.locationService = requireNonNull(locationService, "locationService is null"); + this.partitionUpdateCodec = requireNonNull(partitionUpdateCodec, "partitionUpdateCodec is null"); + 
this.typeTranslator = requireNonNull(typeTranslator, "typeTranslator is null"); + this.prestoVersion = requireNonNull(prestoVersion, "prestoVersion is null"); + this.accessControlMetadataFactory = requireNonNull(accessControlMetadataFactory, "accessControlMetadataFactory is null"); + this.hiveTransactionHeartbeatInterval = requireNonNull(hiveTransactionHeartbeatInterval, "hiveTransactionHeartbeatInterval is null"); + this.vacuumCleanupRecheckInterval = requireNonNull(vacuumCleanupRecheckInterval, "vacuumCleanupInterval is null"); + + renameExecution = new BoundedExecutor(executorService, maxConcurrentFileRenames); + this.hiveVacuumService = requireNonNull(hiveVacuumService, "hiveVacuumService is null"); + this.heartbeatService = requireNonNull(heartbeatService, "heartbeatService is null"); + this.hiveMetastoreClientService = requireNonNull(hiveMetastoreClientService, "heartbeatService is null"); + this.vacuumDeltaNumThreshold = vacuumDeltaNumThreshold; + this.vacuumDeltaPercentThreshold = vacuumDeltaPercentThreshold; + this.autoVacuumEnabled = autoVacuumEnabled; + this.vacuumCollectorInterval = vacuumCollectorInterval; + this.hmsWriteBatchSize = hmsWriteBatchSize; + this.scheduler = scheduler; + this.mppConfig = mppConfig; + } + + @Override + public HiveMetadata get() + { + SemiTransactionalHiveMetastore metastore = new SemiTransactionalHiveMetastore( + hdfsEnvironment, + CachingHiveMetastore.memoizeMetastore(this.metastore, perTransactionCacheMaximumSize), // per-transaction cache + renameExecution, + hiveVacuumService, + vacuumCleanupRecheckInterval, + skipDeletionForAlter, + skipTargetCleanupOnRollback, + hiveTransactionHeartbeatInterval, + heartbeatService, + hiveMetastoreClientService, + hmsWriteBatchSize); + + return new MppMetadata( + metastore, + hdfsEnvironment, + partitionManager, + writesToNonManagedTablesEnabled, + createsOfNonManagedTablesEnabled, + tableCreatesWithLocationAllowed, + typeManager, + locationService, + partitionUpdateCodec, + 
typeTranslator, + prestoVersion, + new MetastoreHiveStatisticsProvider(metastore, statsCache, samplePartitionCache), + accessControlMetadataFactory.create(metastore), + autoVacuumEnabled, + vacuumDeltaNumThreshold, + vacuumDeltaPercentThreshold, + hiveVacuumService, + vacuumCollectorInterval, + hiveMetastoreClientService, + scheduler, + mppConfig); + } +} diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppModule.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppModule.java new file mode 100644 index 000000000..40f7b5870 --- /dev/null +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppModule.java @@ -0,0 +1,173 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.mpp; + +import com.google.common.cache.CacheLoader; +import com.google.inject.Binder; +import com.google.inject.Scopes; +import com.google.inject.TypeLiteral; +import com.google.inject.multibindings.Multibinder; +import io.airlift.event.client.EventClient; +import io.hetu.core.plugin.mpp.scheduler.Scheduler; +import io.prestosql.orc.BloomFilterCacheStatsLister; +import io.prestosql.orc.FileTailCacheStatsLister; +import io.prestosql.orc.RowDataCacheStatsLister; +import io.prestosql.orc.RowIndexCacheStatsLister; +import io.prestosql.orc.StripeFooterCacheStatsLister; +import io.prestosql.plugin.hive.CachingDirectoryLister; +import io.prestosql.plugin.hive.CoercionPolicy; +import io.prestosql.plugin.hive.DirectoryLister; +import io.prestosql.plugin.hive.DynamicConfigurationProvider; +import io.prestosql.plugin.hive.FileFormatDataSourceStats; +import io.prestosql.plugin.hive.GenericHiveRecordCursorProvider; +import io.prestosql.plugin.hive.HdfsConfiguration; +import io.prestosql.plugin.hive.HdfsConfigurationInitializer; +import io.prestosql.plugin.hive.HdfsEnvironment; +import io.prestosql.plugin.hive.HiveAnalyzeProperties; +import io.prestosql.plugin.hive.HiveCoercionPolicy; +import io.prestosql.plugin.hive.HiveConfig; +import io.prestosql.plugin.hive.HiveEventClient; +import io.prestosql.plugin.hive.HiveFileWriterFactory; +import io.prestosql.plugin.hive.HiveHdfsConfiguration; +import io.prestosql.plugin.hive.HiveLocationService; +import io.prestosql.plugin.hive.HiveMetadataFactory; +import io.prestosql.plugin.hive.HiveModule; +import io.prestosql.plugin.hive.HiveNodePartitioningProvider; +import io.prestosql.plugin.hive.HivePageSinkProvider; +import io.prestosql.plugin.hive.HivePageSourceFactory; +import io.prestosql.plugin.hive.HivePageSourceProvider; +import io.prestosql.plugin.hive.HivePartitionManager; +import io.prestosql.plugin.hive.HiveRecordCursorProvider; +import io.prestosql.plugin.hive.HiveSelectivePageSourceFactory; 
+import io.prestosql.plugin.hive.HiveSessionProperties; +import io.prestosql.plugin.hive.HiveTableProperties; +import io.prestosql.plugin.hive.HiveTransactionManager; +import io.prestosql.plugin.hive.HiveTypeTranslator; +import io.prestosql.plugin.hive.HiveWriterStats; +import io.prestosql.plugin.hive.LocationService; +import io.prestosql.plugin.hive.NamenodeStats; +import io.prestosql.plugin.hive.OrcFileWriterConfig; +import io.prestosql.plugin.hive.OrcFileWriterFactory; +import io.prestosql.plugin.hive.ParquetFileWriterConfig; +import io.prestosql.plugin.hive.PartitionUpdate; +import io.prestosql.plugin.hive.RcFileFileWriterFactory; +import io.prestosql.plugin.hive.S3SelectRecordCursorProvider; +import io.prestosql.plugin.hive.TransactionalMetadata; +import io.prestosql.plugin.hive.TypeTranslator; +import io.prestosql.plugin.hive.orc.OrcPageSourceFactory; +import io.prestosql.plugin.hive.orc.OrcSelectivePageSourceFactory; +import io.prestosql.plugin.hive.parquet.ParquetPageSourceFactory; +import io.prestosql.plugin.hive.rcfile.RcFilePageSourceFactory; +import io.prestosql.plugin.hive.s3.PrestoS3ClientFactory; +import io.prestosql.plugin.hive.util.IndexCache; +import io.prestosql.plugin.hive.util.IndexCacheLoader; +import io.prestosql.spi.connector.ConnectorNodePartitioningProvider; +import io.prestosql.spi.connector.ConnectorPageSinkProvider; +import io.prestosql.spi.connector.ConnectorPageSourceProvider; +import io.prestosql.spi.connector.ConnectorSplitManager; + +import java.util.function.Supplier; + +import static com.google.inject.multibindings.Multibinder.newSetBinder; +import static io.airlift.configuration.ConfigBinder.configBinder; +import static io.airlift.json.JsonCodecBinder.jsonCodecBinder; +import static org.weakref.jmx.guice.ExportBinder.newExporter; + +public class MppModule + extends HiveModule +{ + @Override + public void configure(Binder binder) + { + binder.bind(TypeTranslator.class).toInstance(new HiveTypeTranslator()); + 
binder.bind(CoercionPolicy.class).to(HiveCoercionPolicy.class).in(Scopes.SINGLETON); + + binder.bind(HdfsConfigurationInitializer.class).in(Scopes.SINGLETON); + newSetBinder(binder, DynamicConfigurationProvider.class); + binder.bind(HdfsConfiguration.class).to(HiveHdfsConfiguration.class).in(Scopes.SINGLETON); + binder.bind(HdfsEnvironment.class).in(Scopes.SINGLETON); + binder.bind(DirectoryLister.class).to(CachingDirectoryLister.class).in(Scopes.SINGLETON); + configBinder(binder).bindConfig(HiveConfig.class); + + binder.bind(HiveSessionProperties.class).in(Scopes.SINGLETON); + binder.bind(HiveTableProperties.class).in(Scopes.SINGLETON); + binder.bind(HiveAnalyzeProperties.class).in(Scopes.SINGLETON); + + binder.bind(NamenodeStats.class).in(Scopes.SINGLETON); + newExporter(binder).export(NamenodeStats.class).withGeneratedName(); + + binder.bind(PrestoS3ClientFactory.class).in(Scopes.SINGLETON); + + binder.bind(CachingDirectoryLister.class).in(Scopes.SINGLETON); + newExporter(binder).export(CachingDirectoryLister.class).withGeneratedName(); + + Multibinder recordCursorProviderBinder = newSetBinder(binder, HiveRecordCursorProvider.class); + recordCursorProviderBinder.addBinding().to(S3SelectRecordCursorProvider.class).in(Scopes.SINGLETON); + recordCursorProviderBinder.addBinding().to(GenericHiveRecordCursorProvider.class).in(Scopes.SINGLETON); + + binder.bind(HiveWriterStats.class).in(Scopes.SINGLETON); + newExporter(binder).export(HiveWriterStats.class).withGeneratedName(); + + newSetBinder(binder, EventClient.class).addBinding().to(HiveEventClient.class).in(Scopes.SINGLETON); + binder.bind(HivePartitionManager.class).in(Scopes.SINGLETON); + binder.bind(LocationService.class).to(HiveLocationService.class).in(Scopes.SINGLETON); + binder.bind(HiveMetadataFactory.class).to(MppMetadataFactory.class).in(Scopes.SINGLETON); + binder.bind(new TypeLiteral>() {}).to(HiveMetadataFactory.class).in(Scopes.SINGLETON); + 
binder.bind(HiveTransactionManager.class).in(Scopes.SINGLETON); + binder.bind(ConnectorSplitManager.class).to(MppSplitManager.class).in(Scopes.SINGLETON); + newExporter(binder).export(ConnectorSplitManager.class).as(generator -> generator.generatedNameOf(MppSplitManager.class)); + binder.bind(ConnectorPageSourceProvider.class).to(HivePageSourceProvider.class).in(Scopes.SINGLETON); + binder.bind(ConnectorPageSinkProvider.class).to(HivePageSinkProvider.class).in(Scopes.SINGLETON); + binder.bind(ConnectorNodePartitioningProvider.class).to(HiveNodePartitioningProvider.class).in(Scopes.SINGLETON); + + jsonCodecBinder(binder).bindJsonCodec(PartitionUpdate.class); + + binder.bind(FileFormatDataSourceStats.class).in(Scopes.SINGLETON); + newExporter(binder).export(FileFormatDataSourceStats.class).withGeneratedName(); + + Multibinder pageSourceFactoryBinder = newSetBinder(binder, HivePageSourceFactory.class); + pageSourceFactoryBinder.addBinding().to(OrcPageSourceFactory.class).in(Scopes.SINGLETON); + pageSourceFactoryBinder.addBinding().to(ParquetPageSourceFactory.class).in(Scopes.SINGLETON); + pageSourceFactoryBinder.addBinding().to(RcFilePageSourceFactory.class).in(Scopes.SINGLETON); + + Multibinder selectivePageSourceFactoryBinder = newSetBinder(binder, HiveSelectivePageSourceFactory.class); + selectivePageSourceFactoryBinder.addBinding().to(OrcSelectivePageSourceFactory.class).in(Scopes.SINGLETON); + + Multibinder fileWriterFactoryBinder = newSetBinder(binder, HiveFileWriterFactory.class); + binder.bind(OrcFileWriterFactory.class).in(Scopes.SINGLETON); + newExporter(binder).export(OrcFileWriterFactory.class).withGeneratedName(); + configBinder(binder).bindConfig(OrcFileWriterConfig.class); + fileWriterFactoryBinder.addBinding().to(OrcFileWriterFactory.class).in(Scopes.SINGLETON); + fileWriterFactoryBinder.addBinding().to(RcFileFileWriterFactory.class).in(Scopes.SINGLETON); + + configBinder(binder).bindConfig(ParquetFileWriterConfig.class); + + 
binder.bind(CacheLoader.class).to(IndexCacheLoader.class).in(Scopes.SINGLETON); + binder.bind(IndexCache.class).in(Scopes.SINGLETON); + + binder.bind(FileTailCacheStatsLister.class).in(Scopes.SINGLETON); + newExporter(binder).export(FileTailCacheStatsLister.class).withGeneratedName(); + binder.bind(StripeFooterCacheStatsLister.class).in(Scopes.SINGLETON); + newExporter(binder).export(StripeFooterCacheStatsLister.class).withGeneratedName(); + binder.bind(RowIndexCacheStatsLister.class).in(Scopes.SINGLETON); + newExporter(binder).export(RowIndexCacheStatsLister.class).withGeneratedName(); + binder.bind(BloomFilterCacheStatsLister.class).in(Scopes.SINGLETON); + newExporter(binder).export(BloomFilterCacheStatsLister.class).withGeneratedName(); + binder.bind(RowDataCacheStatsLister.class).in(Scopes.SINGLETON); + newExporter(binder).export(RowDataCacheStatsLister.class).withGeneratedName(); + + configBinder(binder).bindConfig(MppConfig.class); + binder.bind(Scheduler.class).in(Scopes.SINGLETON); + } +} diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppPlugin.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppPlugin.java new file mode 100644 index 000000000..4d89d1368 --- /dev/null +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppPlugin.java @@ -0,0 +1,35 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.mpp; + +import com.google.common.collect.ImmutableList; +import io.prestosql.plugin.hive.HivePlugin; +import io.prestosql.spi.connector.ConnectorFactory; + +import java.util.Optional; + +public class MppPlugin + extends HivePlugin +{ + public MppPlugin() + { + super("mpp"); + } + + @Override + public Iterable getConnectorFactories() + { + return ImmutableList.of(new MppConnectorFactory("mpp", MppPlugin.class.getClassLoader(), Optional.empty())); + } +} diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppSplitManager.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppSplitManager.java new file mode 100644 index 000000000..60be6e97b --- /dev/null +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppSplitManager.java @@ -0,0 +1,361 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.mpp; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Ordering; +import io.airlift.concurrent.BoundedExecutor; +import io.airlift.log.Logger; +import io.airlift.stats.CounterStat; +import io.airlift.units.DataSize; +import io.hetu.core.plugin.mpp.scheduler.entity.ETLInfo; +import io.hetu.core.plugin.mpp.scheduler.utils.Const; +import io.hetu.core.plugin.mpp.scheduler.utils.Util; +import io.prestosql.plugin.hive.BackgroundHiveSplitLoader; +import io.prestosql.plugin.hive.CoercionPolicy; +import io.prestosql.plugin.hive.DirectoryLister; +import io.prestosql.plugin.hive.ForHive; +import io.prestosql.plugin.hive.HdfsEnvironment; +import io.prestosql.plugin.hive.HiveBucketHandle; +import io.prestosql.plugin.hive.HiveBucketing; +import io.prestosql.plugin.hive.HiveConfig; +import io.prestosql.plugin.hive.HiveMetadata; +import io.prestosql.plugin.hive.HiveNotReadableException; +import io.prestosql.plugin.hive.HivePartition; +import io.prestosql.plugin.hive.HivePartitionManager; +import io.prestosql.plugin.hive.HivePartitionMetadata; +import io.prestosql.plugin.hive.HiveSplitLoader; +import io.prestosql.plugin.hive.HiveSplitManager; +import io.prestosql.plugin.hive.HiveSplitSource; +import io.prestosql.plugin.hive.HiveStorageFormat; +import io.prestosql.plugin.hive.HiveTableHandle; +import io.prestosql.plugin.hive.HiveTransactionHandle; +import io.prestosql.plugin.hive.HiveVacuumSplitSource; +import io.prestosql.plugin.hive.HiveVacuumTableHandle; +import io.prestosql.plugin.hive.NamenodeStats; +import io.prestosql.plugin.hive.authentication.HiveIdentity; +import io.prestosql.plugin.hive.metastore.SemiTransactionalHiveMetastore; +import io.prestosql.plugin.hive.metastore.Table; +import io.prestosql.spi.PrestoException; +import io.prestosql.spi.VersionEmbedder; +import io.prestosql.spi.connector.ColumnMetadata; +import io.prestosql.spi.connector.ConnectorSession; +import 
io.prestosql.spi.connector.ConnectorSplitSource; +import io.prestosql.spi.connector.ConnectorTableHandle; +import io.prestosql.spi.connector.ConnectorTransactionHandle; +import io.prestosql.spi.connector.FixedSplitSource; +import io.prestosql.spi.connector.SchemaTableName; +import io.prestosql.spi.connector.TableNotFoundException; +import io.prestosql.spi.dynamicfilter.DynamicFilter; +import io.prestosql.spi.predicate.TupleDomain; +import io.prestosql.spi.resourcegroups.QueryType; +import io.prestosql.spi.type.TypeManager; + +import javax.annotation.Nullable; +import javax.inject.Inject; + +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.RejectedExecutionException; +import java.util.function.Function; +import java.util.function.Supplier; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Strings.isNullOrEmpty; +import static io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR; +import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; +import static io.prestosql.spi.StandardErrorCode.SERVER_SHUTTING_DOWN; +import static io.prestosql.spi.connector.ConnectorSplitManager.SplitSchedulingStrategy.GROUPED_SCHEDULING; +import static java.util.Objects.requireNonNull; + +public class MppSplitManager + extends HiveSplitManager +{ + public static Logger logger = Logger.get(MppSplitManager.class); + public static final String OBJECT_NOT_READABLE = "object_not_readable"; + + private final Function metastoreProvider; + private final HivePartitionManager partitionManager; + private final NamenodeStats namenodeStats; + private final HdfsEnvironment hdfsEnvironment; + private final DirectoryLister directoryLister; + private final Executor executor; + private final CoercionPolicy 
coercionPolicy; + private final int maxOutstandingSplits; + private final DataSize maxOutstandingSplitsSize; + private final int minPartitionBatchSize; + private final int maxPartitionBatchSize; + private final int maxInitialSplits; + private final int splitLoaderConcurrency; + private final int maxSplitsPerSecond; + private final boolean recursiveDfsWalkerEnabled; + private final CounterStat highMemorySplitSourceCounter; + private final TypeManager typeManager; + private final HiveConfig hiveConfig; + + @Inject + public MppSplitManager( + HiveConfig hiveConfig, + Function metastoreProvider, + HivePartitionManager partitionManager, + NamenodeStats namenodeStats, + HdfsEnvironment hdfsEnvironment, + DirectoryLister directoryLister, + @ForHive ExecutorService executorService, + VersionEmbedder versionEmbedder, + TypeManager typeManager, + CoercionPolicy coercionPolicy) + { + this( + metastoreProvider, + partitionManager, + namenodeStats, + hdfsEnvironment, + directoryLister, + versionEmbedder.embedVersion(new BoundedExecutor(executorService, hiveConfig.getMaxSplitIteratorThreads())), + coercionPolicy, + new CounterStat(), + hiveConfig.getMaxOutstandingSplits(), + hiveConfig.getMaxOutstandingSplitsSize(), + hiveConfig.getMinPartitionBatchSize(), + hiveConfig.getMaxPartitionBatchSize(), + hiveConfig.getMaxInitialSplits(), + hiveConfig.getSplitLoaderConcurrency(), + hiveConfig.getMaxSplitsPerSecond(), + hiveConfig.getRecursiveDirWalkerEnabled(), + typeManager, + hiveConfig); + } + + public MppSplitManager( + Function metastoreProvider, + HivePartitionManager partitionManager, + NamenodeStats namenodeStats, + HdfsEnvironment hdfsEnvironment, + DirectoryLister directoryLister, + Executor executor, + CoercionPolicy coercionPolicy, + CounterStat highMemorySplitSourceCounter, + int maxOutstandingSplits, + DataSize maxOutstandingSplitsSize, + int minPartitionBatchSize, + int maxPartitionBatchSize, + int maxInitialSplits, + int splitLoaderConcurrency, + @Nullable Integer 
maxSplitsPerSecond, + boolean recursiveDfsWalkerEnabled, + TypeManager typeManager, + HiveConfig hiveConfig) + { + super(metastoreProvider, partitionManager, namenodeStats, + hdfsEnvironment, directoryLister, executor, coercionPolicy, + highMemorySplitSourceCounter, maxOutstandingSplits, + maxOutstandingSplitsSize, minPartitionBatchSize, + maxPartitionBatchSize, maxInitialSplits, splitLoaderConcurrency, + maxSplitsPerSecond, recursiveDfsWalkerEnabled, typeManager, hiveConfig); + + this.metastoreProvider = requireNonNull(metastoreProvider, "metastore is null"); + this.partitionManager = requireNonNull(partitionManager, "partitionManager is null"); + this.namenodeStats = requireNonNull(namenodeStats, "namenodeStats is null"); + this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null"); + this.directoryLister = requireNonNull(directoryLister, "directoryLister is null"); + this.executor = new ErrorCodedExecutor(executor); + this.coercionPolicy = requireNonNull(coercionPolicy, "coercionPolicy is null"); + this.highMemorySplitSourceCounter = requireNonNull(highMemorySplitSourceCounter, "highMemorySplitSourceCounter is null"); + checkArgument(maxOutstandingSplits >= 1, "maxOutstandingSplits must be at least 1"); + this.maxOutstandingSplits = maxOutstandingSplits; + this.maxOutstandingSplitsSize = maxOutstandingSplitsSize; + this.minPartitionBatchSize = minPartitionBatchSize; + this.maxPartitionBatchSize = maxPartitionBatchSize; + this.maxInitialSplits = maxInitialSplits; + this.splitLoaderConcurrency = splitLoaderConcurrency; + this.maxSplitsPerSecond = firstNonNull(maxSplitsPerSecond, Integer.MAX_VALUE); + this.recursiveDfsWalkerEnabled = recursiveDfsWalkerEnabled; + this.typeManager = typeManager; + this.hiveConfig = hiveConfig; + } + + @Override + public ConnectorSplitSource getSplits( + ConnectorTransactionHandle transaction, + ConnectorSession session, + ConnectorTableHandle tableHandle, + SplitSchedulingStrategy splitSchedulingStrategy, + 
Supplier>> dynamicFilterSupplier, + Optional queryType, + Map queryInfo, + Set> userDefinedCachePredicates, + boolean partOfReuse) + { + HiveTableHandle hiveTable = (HiveTableHandle) tableHandle; + SchemaTableName tableName = hiveTable.getSchemaTableName(); + + String tblIdentifier = tableName.getSchemaName() + "." + tableName.getTableName(); + Integer recode = Const.tableStatus.get(tblIdentifier) != null ? Const.tableStatus.get(tblIdentifier) : -1; + + if (recode == -1) { + logger.info("Have not find the gaussdb table's status info, maybe this is a reuse scheduler!"); + } + else { + logger.info("Find the gaussdb table's etl status info!"); + while (recode != 1) { + try { + Thread.sleep(2000); + logger.info("Waitting to complete GDS process transporting data to alluxio"); + } + catch (InterruptedException e) { + e.printStackTrace(); + } + recode = Const.tableStatus.get(tblIdentifier); + } + + Const.tableStatus.remove(tblIdentifier); + ETLInfo etlInfo = Const.etlInfoMap.get(tblIdentifier); + Const.etlInfoMap.put(tblIdentifier, new ETLInfo(1, etlInfo.getStartTime(), Util.getDate())); + } + + // get table metadata + SemiTransactionalHiveMetastore metastore = metastoreProvider.apply((HiveTransactionHandle) transaction); + Table table = metastore.getTable(new HiveIdentity(session), tableName.getSchemaName(), tableName.getTableName()) + .orElseThrow(() -> new TableNotFoundException(tableName)); + if (table.getStorage().getStorageFormat().getInputFormat().contains("carbon")) { + throw new PrestoException(NOT_SUPPORTED, "Hive connector can't read carbondata tables"); + } + + // verify table is not marked as non-readable + String tableNotReadable = table.getParameters().get(OBJECT_NOT_READABLE); + if (!isNullOrEmpty(tableNotReadable)) { + throw new HiveNotReadableException(tableName, Optional.empty(), tableNotReadable); + } + + // get partitions + List partitions = partitionManager.getOrLoadPartitions(session, metastore, new HiveIdentity(session), hiveTable); + + // short 
circuit if we don't have any partitions + if (partitions.isEmpty()) { + return new FixedSplitSource(ImmutableList.of()); + } + + // get buckets from first partition (arbitrary) + Optional bucketFilter = hiveTable.getBucketFilter(); + + // validate bucket bucketed execution + Optional bucketHandle = hiveTable.getBucketHandle(); + if ((splitSchedulingStrategy == GROUPED_SCHEDULING) && !bucketHandle.isPresent()) { + throw new PrestoException(GENERIC_INTERNAL_ERROR, "SchedulingPolicy is bucketed, but BucketHandle is not present"); + } + + // sort partitions + partitions = Ordering.natural().onResultOf(HivePartition::getPartitionId).reverse().sortedCopy(partitions); + + Iterable hivePartitions = getPartitionMetadata(session, metastore, table, tableName, partitions, bucketHandle.map(HiveBucketHandle::toTableBucketProperty)); + + HiveSplitLoader hiveSplitLoader = new BackgroundHiveSplitLoader( + table, + hivePartitions, + hiveTable.getCompactEffectivePredicate(), + BackgroundHiveSplitLoader.BucketSplitInfo.createBucketSplitInfo(bucketHandle, bucketFilter), + session, + hdfsEnvironment, + namenodeStats, + directoryLister, + executor, + splitLoaderConcurrency, + recursiveDfsWalkerEnabled, + metastore.getValidWriteIds(session, hiveTable, queryType.map(t -> t == QueryType.VACUUM).orElse(false)) + .map(validTxnWriteIdList -> validTxnWriteIdList.getTableValidWriteIdList(table.getDatabaseName() + "." + table.getTableName())), + dynamicFilterSupplier, + queryType, + queryInfo, + typeManager); + + HiveSplitSource splitSource; + HiveStorageFormat hiveStorageFormat = HiveMetadata.extractHiveStorageFormat(table); + switch (splitSchedulingStrategy) { + case UNGROUPED_SCHEDULING: + splitSource = HiveSplitSource.allAtOnce( + session, + table.getDatabaseName(), + table.getTableName(), + partOfReuse ? 0 : maxInitialSplits, //For reuse, we should make sure to have same split size all time for a table. 
+ maxOutstandingSplits, + maxOutstandingSplitsSize, + maxSplitsPerSecond, + hiveSplitLoader, + executor, + new CounterStat(), + dynamicFilterSupplier, + userDefinedCachePredicates, + typeManager, + hiveConfig, + hiveStorageFormat); + break; + case GROUPED_SCHEDULING: + splitSource = HiveSplitSource.bucketed( + session, + table.getDatabaseName(), + table.getTableName(), + partOfReuse ? 0 : maxInitialSplits, //For reuse, we should make sure to have same split size all time for a table. + maxOutstandingSplits, + maxOutstandingSplitsSize, + maxSplitsPerSecond, + hiveSplitLoader, + executor, + new CounterStat(), + dynamicFilterSupplier, + userDefinedCachePredicates, + typeManager, + hiveConfig, + hiveStorageFormat); + break; + default: + throw new IllegalArgumentException("Unknown splitSchedulingStrategy: " + splitSchedulingStrategy); + } + hiveSplitLoader.start(splitSource); + + if (queryType.isPresent() && queryType.get() == QueryType.VACUUM) { + HdfsEnvironment.HdfsContext hdfsContext = new HdfsEnvironment.HdfsContext(session, table.getDatabaseName(), table.getTableName()); + return new HiveVacuumSplitSource(splitSource, (HiveVacuumTableHandle) queryInfo.get("vacuumHandle"), hdfsEnvironment, hdfsContext, session); + } + + return splitSource; + } + + private static class ErrorCodedExecutor + implements Executor + { + private final Executor delegate; + + private ErrorCodedExecutor(Executor delegate) + { + this.delegate = requireNonNull(delegate, "delegate is null"); + } + + @Override + public void execute(Runnable command) + { + try { + delegate.execute(command); + } + catch (RejectedExecutionException e) { + throw new PrestoException(SERVER_SHUTTING_DOWN, "Server is shutting down", e); + } + } + } +} diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java new file mode 100644 index 000000000..3b721f6c0 --- /dev/null +++ 
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

/**
 * Thread-safe registry mapping a table identifier to the names of the worker
 * threads currently processing that table.
 *
 * All access is serialized on {@code this}; the per-key lists are
 * {@link CopyOnWriteArrayList}s so they can additionally be iterated while
 * another thread removes entries.
 */
public class RunningTaskHashMap
{
    // Guarded by "this". Kept public for backward compatibility with existing callers.
    public HashMap<String, List<String>> hashMap;

    public RunningTaskHashMap()
    {
        this.hashMap = new HashMap<>();
    }

    /**
     * Associates {@code value} with {@code key}.
     *
     * @return the value that was passed in (NOT the previous mapping) — this
     *         preserves the historical contract of this class
     */
    public synchronized List<String> put(String key, List<String> value)
    {
        hashMap.put(key, value);
        return value;
    }

    /**
     * Returns the thread-name list registered under {@code key}, creating and
     * registering an empty list on first access (never returns {@code null}).
     */
    public synchronized List<String> get(String key)
    {
        return hashMap.computeIfAbsent(key, k -> new CopyOnWriteArrayList<>());
    }

    /** Removes and returns the whole list for {@code key}; {@code null} if absent. */
    public synchronized List<String> removeList(Object key)
    {
        return hashMap.remove(key);
    }

    /**
     * Removes {@code threadName} from the list registered under {@code key}.
     *
     * @return {@code true} if the name was removed or no list exists for the key;
     *         {@code false} if a list exists but did not contain the name
     */
    public synchronized boolean removeThread(Object key, String threadName)
    {
        List<String> names = hashMap.get(key);
        return names == null || names.remove(threadName);
    }
}
b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java @@ -0,0 +1,256 @@ +/* + * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.mpp; + +import io.hetu.core.plugin.mpp.scheduler.utils.Const; +import net.jodah.expiringmap.ExpirationPolicy; +import net.jodah.expiringmap.ExpiringMap; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.concurrent.TimeUnit; + +public class SynchronizedHashMap +{ + public HashMap hashMap; + public String mapName; + public static final String TABLE_STATUS_MAP = "tableStatus"; + public static final String ETL_INFO_MAP = "etlInfo"; + + public static final String TABLE_SCHEMA_MAP = "schemas"; + + public HashMap> threadHashMap; + + public List tableStatusList; + + public ExpiringMap expiringMap; + + public HashMap gaussDbTaskStatusMap; + + public int second; + + private SynchronizedHashMap() + { + } + + public SynchronizedHashMap(String mapName) + { + this.mapName = mapName; + switch (mapName){ + case TABLE_STATUS_MAP: + threadHashMap = new HashMap(); + tableStatusList = new ArrayList<>(); + gaussDbTaskStatusMap = new HashMap<>(); + case ETL_INFO_MAP: + hashMap = new HashMap(); + default: + break; + } + } + + public SynchronizedHashMap(int second, int maxSize) + { + this.second = second; + this.mapName = TABLE_SCHEMA_MAP; + expiringMap = ExpiringMap.builder() + 
.maxSize(maxSize) + .expirationPolicy(ExpirationPolicy.ACCESSED) + .expiration(second, TimeUnit.SECONDS) + .build(); + } + + public V put(K key, V value, String threadName) + { + switch (mapName){ + case TABLE_STATUS_MAP: + synchronized (this) { + if (threadHashMap.get(threadName) == null) { + HashMap kvHashMap = new HashMap<>(); + V result = kvHashMap.put(key, value); + threadHashMap.put(threadName, kvHashMap); + if (value instanceof Integer) { + if (((Integer) value).intValue() == 0) { + tableStatusList.add(key); + } + } + return result; + } + else { + HashMap kvHashMap = threadHashMap.get(threadName); + V result = kvHashMap.put(key, value); + threadHashMap.put(threadName, kvHashMap); + if (value instanceof Integer) { + if (((Integer) value).intValue() == 0) { + tableStatusList.add(key); + } + } + return result; + } + } + case TABLE_SCHEMA_MAP: + synchronized (this) { + return expiringMap.put(key, value); + } + default: + synchronized (this) { + return hashMap.put(key, value); + } + } + } + + public V put(K key, V value) + { + switch (mapName) { + case TABLE_STATUS_MAP: + synchronized (this) { + String threadName = getThreadName(); + if (threadHashMap.get(threadName) == null) { + HashMap kvHashMap = new HashMap<>(); + V result = kvHashMap.put(key, value); + threadHashMap.put(threadName, kvHashMap); + if (value instanceof Integer) { + if (((Integer) value).intValue() == 0) { + tableStatusList.add(key); + } + } + return result; + } + else { + HashMap kvHashMap = threadHashMap.get(threadName); + V result = kvHashMap.put(key, value); + threadHashMap.put(threadName, kvHashMap); + if (value instanceof Integer) { + if (((Integer) value).intValue() == 0) { + tableStatusList.add(key); + } + } + return result; + } + } + case TABLE_SCHEMA_MAP: + synchronized (this) { + return expiringMap.put(key, value); + } + default: + synchronized (this) { + return hashMap.put(key, value); + } + } + } + + public String getThreadName() + { + String threadName = 
Thread.currentThread().getName(); + threadName = threadName.substring(0, threadName.lastIndexOf(Const.idSeparator)); + return threadName; + } + + /** + * get put remove + * @param key + * @return + */ + public boolean containsKey(Object key) + { + switch (mapName){ + case TABLE_STATUS_MAP: + synchronized (this) { + String threadName = getThreadName(); + if (!threadHashMap.containsKey(threadName)) { + return false; + } + else { + HashMap kvHashMap = threadHashMap.get(threadName); + boolean result = kvHashMap.containsKey(key); + return result; + } + } + case TABLE_SCHEMA_MAP: + synchronized (this) { + return expiringMap.containsKey(key); + } + default: + synchronized (this) { + return hashMap.containsKey(key); + } + } + } + + public V get(Object key) + { + switch (mapName){ + case TABLE_STATUS_MAP: + synchronized (this) { + String threadName = getThreadName(); + if (!threadHashMap.containsKey(threadName)) { + return null; + } + else { + HashMap kvHashMap = threadHashMap.get(threadName); + V value = kvHashMap.get(key); + return value; + } + } + case TABLE_SCHEMA_MAP: + synchronized (this) { + return expiringMap.get(key); + } + default: + synchronized (this) { + return hashMap.get(key); + } + } + } + + /** + * 仅限于tablestatus开头判断 + * @param key + * @return + */ + public boolean tableStatusKeysExists(Object key) + { + return tableStatusList.contains(key); + } + + public V remove(Object key) + { + switch (mapName){ + case TABLE_STATUS_MAP: + synchronized (this) { + String threadName = getThreadName(); + if (!threadHashMap.containsKey(threadName)) { + return null; + } + else { + HashMap kvHashMap = threadHashMap.get(threadName); + V value = kvHashMap.remove(key); + threadHashMap.remove(threadName); + //可能存在问题 + tableStatusList.remove(key); + return value; + } + } + case TABLE_SCHEMA_MAP: + synchronized (this) { + return expiringMap.remove(key); + } + default: + synchronized (this) { + return hashMap.remove(key); + } + } + } +} diff --git 
/**
 * Supplies per-table monitor objects for {@code synchronized} blocks.
 *
 * Interning the table identifier guarantees that every caller using the same
 * "schema.table" string synchronizes on the same canonical String instance.
 */
public class TableMoveLock
{
    private TableMoveLock()
    {
    }

    /**
     * Returns the canonical (interned) instance of {@code lock}, suitable for
     * use as a shared monitor.
     */
    public static String getLock(String lock)
    {
        return lock.intern();
    }
}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.mpp.scheduler; + +import io.airlift.log.Logger; +import io.hetu.core.plugin.mpp.MppConfig; +import io.hetu.core.plugin.mpp.scheduler.db.GsussDBOptThread; +import io.hetu.core.plugin.mpp.scheduler.hadoop.HiveOpt; +import io.hetu.core.plugin.mpp.scheduler.utils.Const; + +import javax.inject.Inject; + +import java.util.HashMap; +import java.util.LinkedList; +import java.util.Map; +import java.util.Queue; + +public class Scheduler +{ + public static Logger logger = Logger.get(Scheduler.class); + public MppConfig mppConfig; + public Queue> gdsQueue; + + @Inject + public Scheduler(MppConfig mppConfig) + { + this.mppConfig = mppConfig; + this.gdsQueue = getGdsQueue(mppConfig); + } + + public Queue> getGdsQueue(MppConfig mppConfig) + { + String[] gdsArr = mppConfig.getGdsList().split(","); + Map gdsMaps = new HashMap<>(); + for (String gdsServer : gdsArr) { + gdsServer.split("\\|"); + gdsMaps.put(gdsServer.split("\\|")[0], gdsServer.split("\\|")[1]); + } + Queue> gdsQueue = new LinkedList<>(); + for (Map.Entry entry : gdsMaps.entrySet()) { + gdsQueue.add(entry); + } + return gdsQueue; + } + + public Map.Entry getGDS() + { + while (true) { + if (gdsQueue.isEmpty()) { + logger.info("GDS queue is empty, please wait..."); + try { + Thread.sleep(1000); + } + catch (InterruptedException e) { + e.printStackTrace(); + } + } + else { + return gdsQueue.poll(); + } + } + } + + public void prepareHiveExternalTable(Map schemas, String schemaName, String tableName) + { + logger.info("Get schemainfo from gaussDB by table name"); +// 
String hivedbName = gaussdbSchema; +// String hiveSchemaInfo = schemas.get("hiveSchema"); + String tblIdentifier = schemaName + "." + tableName; + + logger.info("Create hive foreign table using alluxio path by hiveserver2 service"); + String auxPath = mppConfig.getAuxUrl() + mppConfig.getBaseAux() + tableName; + HiveOpt.createExternalTable(mppConfig.getHiveUrl() + mppConfig.getHiveDb(), mppConfig.getHiveUser(), mppConfig.getHivePasswd(), + mppConfig.getHsqlDrop(), mppConfig.getHsqlCreate(), + tableName, schemas.get("hiveSchema"), auxPath); + } + + public void startGdsProcess(Map.Entry gdsServer, Map schemas, String schemaName, String tableName) + { + logger.info("Prepare gaussDB GDS process"); + String colSchemaInfo = schemas.get("columns"); + String gsSchemaInfo = schemas.get("gsSchema"); + + String gdsForeignLocation = gdsServer.getKey() + "/" + tableName; + + String createSQL = mppConfig.getGsqlCreate() + .replace("${gaussdb_name}", schemaName) + .replace("${table_name}", tableName) + .replace("${gds_foreign_location}", gdsForeignLocation) + .replace("${table_name}", tableName) + .replace("${schema_info}", gsSchemaInfo); + + String dropSQL = mppConfig.getGsqlDrop() + .replace("${gaussdb_name}", schemaName) + .replace("${table_name}", tableName); + + String insertSQL = mppConfig.getGsqlInsert() + .replace("${gaussdb_name}", schemaName) + .replace("${table_name}", tableName) + .replace("${schema_info}", colSchemaInfo); + String threadName = Const.tableStatus.getThreadName(); + + GsussDBOptThread gsussDBOptThread = new GsussDBOptThread(gdsQueue, gdsServer, + mppConfig.getGsDriver(), mppConfig.getGsUrl(), mppConfig.getGsUser(), mppConfig.getGsPasswd(), + dropSQL, createSQL, insertSQL, schemaName, tableName, mppConfig.getHiveDb(), threadName); + gsussDBOptThread.start(); + logger.info("GaussDB GDS process thread start"); + } +} diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java 
b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java new file mode 100755 index 000000000..4216415c8 --- /dev/null +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java @@ -0,0 +1,150 @@ +/* + * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.mpp.scheduler.db; + +import io.airlift.log.Logger; +import io.hetu.core.plugin.mpp.MppConfig; +import io.hetu.core.plugin.mpp.scheduler.entity.TableSchema; +import io.hetu.core.plugin.mpp.scheduler.utils.Const; +import io.hetu.core.plugin.mpp.scheduler.utils.Util; + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.HashMap; +import java.util.Map; + +/** + * @author chengyijian + * @title: GsussDBOpt + * @projectName mpp-scheduler + * @description: GaussDB操作相关 + * @date 2021/8/1210:56 + */ + +public class GsussDBOpt +{ + public static Logger logger = Logger.get(GsussDBOpt.class); + + private GsussDBOpt() + { + } + + public static Connection getConnection(String driver, String url, String username, String passwd) + { + Connection conn = null; + try { + Class.forName(driver).getConstructor().newInstance(); + } + catch (Exception e) { + e.printStackTrace(); + return null; + } + + try { + conn = 
DriverManager.getConnection(url, username, passwd); + logger.info("GaussDB Connection succeed!"); + } + catch (Exception e) { + e.printStackTrace(); + return null; + } + return conn; + } + + public static void executeSql(Connection conn, String sql) + { + Statement stmt = null; + try { + stmt = conn.createStatement(); + boolean rc = stmt.execute(sql); + stmt.close(); + } + catch (SQLException e) { + if (stmt != null) { + try { + stmt.close(); + } + catch (SQLException e1) { + e1.printStackTrace(); + } + } + e.printStackTrace(); + } + } + + public static Map getSchemas(MppConfig mppConfig, String catalog, String schema, String tableName) + { + String tblIdentifier = catalog + "." + schema + "." + tableName; + + Map schemas = new HashMap<>(); + + if (Const.schemasMap.containsKey(tblIdentifier)) { + TableSchema tableSchema = Const.schemasMap.get(tblIdentifier); + logger.info(tblIdentifier + " schema has stored at " + tableSchema.getSchemaTime()); + schemas.put("columns", tableSchema.getColumns()); + schemas.put("gsSchema", tableSchema.getGsSchema()); + schemas.put("hiveSchema", tableSchema.getHiveSchema()); + return schemas; + } + else { + logger.info(tblIdentifier + " schema has not got it yet!"); + StringBuilder columns; + StringBuilder gsSchema; + StringBuilder hiveSchema; + + Connection conn = GsussDBOpt.getConnection(mppConfig.getGsDriver(), mppConfig.getGsUrl(), mppConfig.getGsUser(), mppConfig.getGsPasswd()); + + try { + DatabaseMetaData dm = conn.getMetaData(); + ResultSet rs = dm.getColumns(catalog, schema, tableName, null); + + columns = new StringBuilder(); + gsSchema = new StringBuilder(); + hiveSchema = new StringBuilder(); + + while (rs.next()) { + String columnName = rs.getString("COLUMN_NAME"); + String dataType = rs.getString("TYPE_NAME"); + int columnSize = rs.getInt("COLUMN_SIZE"); + int decimalDigits = rs.getInt("DECIMAL_DIGITS"); + + columns.append(columnName + ","); + gsSchema.append(columnName + " " + Util.getMappingGSType(dataType, columnSize, 
decimalDigits) + ","); + hiveSchema.append(columnName + " " + Util.getMappingHiveType(dataType, columnSize, decimalDigits) + ","); + } + String columnsTmp = columns.deleteCharAt(columns.length() - 1).toString(); + String gsSchemaTmp = gsSchema.deleteCharAt(gsSchema.length() - 1).toString(); + String hiveSchemaTmp = hiveSchema.deleteCharAt(hiveSchema.length() - 1).toString(); + String schemaTime = Util.getDate(); + + schemas.put("columns", columnsTmp); + schemas.put("gsSchema", gsSchemaTmp); + schemas.put("hiveSchema", hiveSchemaTmp); + + Const.schemasMap.put(tblIdentifier, new TableSchema(columnsTmp, gsSchemaTmp, hiveSchemaTmp, schemaTime)); + rs.close(); + conn.close(); + } + catch (SQLException e) { + e.printStackTrace(); + } + return schemas; + } + } +} diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java new file mode 100755 index 000000000..ea193f6c7 --- /dev/null +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java @@ -0,0 +1,149 @@ +/* + * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.mpp.scheduler.db; + +import io.airlift.log.Logger; +import io.hetu.core.plugin.mpp.TableMoveLock; +import io.hetu.core.plugin.mpp.scheduler.utils.Const; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.List; +import java.util.Map; +import java.util.Queue; + +/** + * @author chengyijian + * @title: GsussDBOpt + * @projectName mpp-scheduler + * @description: GaussDB操作相关 + * @date 2021/8/1210:56 + */ +public class GsussDBOptThread + extends Thread +{ + public static Logger logger = Logger.get(GsussDBOptThread.class); + private String driver; + private String jdbcUrl; + private String username; + private String password; + + private String dropSQL; + private String createSQL; + private String insertSQL; + private String gaussdbSchema; + private String tableName; + private String hiveDb; + private String parentThreadName; + + private Queue> gdsQueue; + private Map.Entry gdsServer; + + public GsussDBOptThread(Queue> gdsQueue, Map.Entry gdsServer, + String driver, String jdbcUrl, String username, String password, + String dropSQL, String createSQL, String insertSQL, + String gaussdbSchema, String tableName, String hiveDb, String parentThreadName) + { + this.gdsQueue = gdsQueue; + this.gdsServer = gdsServer; + this.driver = driver; + this.jdbcUrl = jdbcUrl; + this.username = username; + this.password = password; + this.dropSQL = dropSQL; + this.createSQL = createSQL; + this.insertSQL = insertSQL; + this.gaussdbSchema = gaussdbSchema; + this.tableName = tableName; + this.hiveDb = hiveDb; + this.parentThreadName = parentThreadName; + } + + public Connection getConnection(String username, String passwd) + { + String driver = this.driver; + String jdbcUrl = this.jdbcUrl; + Connection conn = null; + try { + Class.forName(driver).getConstructor().newInstance(); + } + catch (Exception e) { + e.printStackTrace(); + return null; + } + + try { + conn = 
DriverManager.getConnection(jdbcUrl, username, passwd); + logger.info("Connection succeed!"); + } + catch (Exception e) { + e.printStackTrace(); + return null; + } + return conn; + } + + public static void optTable(Connection conn, String sql) + { + Statement stmt = null; + try { + stmt = conn.createStatement(); + boolean rc = stmt.execute(sql); + stmt.close(); + } + catch (SQLException e) { + if (stmt != null) { + try { + stmt.close(); + } + catch (SQLException e1) { + e1.printStackTrace(); + } + } + e.printStackTrace(); + } + } + + @Override + public void run() + { + try { + Connection conn = getConnection(username, password); + logger.info("GaussDB Drop Create Insert Operation Start"); + optTable(conn, dropSQL); + optTable(conn, createSQL); + optTable(conn, insertSQL); +// List runningThreadList = Const.runningThreadMap.get(gaussdbSchema + "." + tableName); + List runningThreadList = Const.runningThreadMap.get(hiveDb + "." + tableName); + synchronized (TableMoveLock.getLock(gaussdbSchema + "." + tableName)) { + for (String threadName : runningThreadList) { + Const.tableStatus.put(hiveDb + "." + tableName, 1, threadName); +// Const.tableStatus.put(gaussdbSchema + "." + tableName, 1, threadName); + Const.runningThreadMap.removeThread(hiveDb + "." + tableName, threadName); + } + } + + logger.info("GaussDB Operation End"); + gdsQueue.add(gdsServer); + logger.info(gdsServer.getKey() + ":" + gdsServer.getValue() + " has been free!"); + conn.close(); + } + catch (SQLException e) { + e.printStackTrace(); + } + } +} diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java new file mode 100644 index 000000000..3e343bc0d --- /dev/null +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java @@ -0,0 +1,64 @@ +/* + * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. 
/**
 * Progress record for one table's ETL run.
 *
 * status: 0 = export in progress ("etling"), 1 = export finished ("etled").
 * Start and end times are pre-formatted strings (see Util.getDate()).
 */
public class ETLInfo
{
    private int status;
    private String startTime;
    private String endTime;

    public ETLInfo()
    {
    }

    public ETLInfo(int status, String startTime, String endTime)
    {
        this.status = status;
        this.startTime = startTime;
        this.endTime = endTime;
    }

    /** 0 while the export is running, 1 once it has finished. */
    public int getStatus()
    {
        return this.status;
    }

    public void setStatus(int status)
    {
        this.status = status;
    }

    public String getStartTime()
    {
        return this.startTime;
    }

    public void setStartTime(String startTime)
    {
        this.startTime = startTime;
    }

    public String getEndTime()
    {
        return this.endTime;
    }

    public void setEndTime(String endTime)
    {
        this.endTime = endTime;
    }
}
/**
 * Cached schema information for one GaussDB table, as comma-separated strings:
 * the plain column-name list, the GaussDB column definitions, the Hive column
 * definitions, and the timestamp at which the schema was captured.
 */
public class TableSchema
{
    private String columns;
    private String gsSchema;
    private String hiveSchema;
    private String schemaTime;

    public TableSchema()
    {
    }

    public TableSchema(String columns, String gsSchema, String hiveSchema, String schemaTime)
    {
        this.columns = columns;
        this.gsSchema = gsSchema;
        this.hiveSchema = hiveSchema;
        this.schemaTime = schemaTime;
    }

    /** Comma-separated column names. */
    public String getColumns()
    {
        return this.columns;
    }

    public void setColumns(String columns)
    {
        this.columns = columns;
    }

    /** Comma-separated "name type" pairs using GaussDB types. */
    public String getGsSchema()
    {
        return this.gsSchema;
    }

    public void setGsSchema(String gsSchema)
    {
        this.gsSchema = gsSchema;
    }

    /** Comma-separated "name type" pairs using Hive types. */
    public String getHiveSchema()
    {
        return this.hiveSchema;
    }

    public void setHiveSchema(String hiveSchema)
    {
        this.hiveSchema = hiveSchema;
    }

    /** Capture time of this schema snapshot (formatted by Util.getDate()). */
    public String getSchemaTime()
    {
        return this.schemaTime;
    }

    public void setSchemaTime(String schemaTime)
    {
        this.schemaTime = schemaTime;
    }
}
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.mpp.scheduler.hadoop; + +import io.airlift.log.Logger; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.sql.Statement; + +public class HiveOpt +{ + public static Logger logger = Logger.get(HiveOpt.class); + + private HiveOpt() + { + } + + public static void createExternalTable(String hiveUrl, String hiveUser, String hivePasswd, String hsqlDrop, String hsqlCreate, + String tblName, String schemaInfo, String auxPath) + { + logger.info(tblName + ": Hive External Table has not existed, create it!"); + try { + Class.forName("org.apache.hive.jdbc.HiveDriver"); + Connection connection = DriverManager.getConnection(hiveUrl, hiveUser, hivePasswd); + Statement statement = connection.createStatement(); + String sqlDrop = hsqlDrop.replace("${table_name}", tblName); + String sqlCreate = hsqlCreate + .replace("${table_name}", tblName) + .replace("${schema_info}", schemaInfo) + .replace("${pipe_to_aux_base_path}", auxPath); + statement.execute(sqlDrop); + statement.execute(sqlCreate); + logger.info("Finished create hive foreign table"); + } + catch (ClassNotFoundException e) { + e.printStackTrace(); + } + catch (SQLException e) { + e.printStackTrace(); + } + } +} diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java new file mode 
100755 index 000000000..418ba9946 --- /dev/null +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.mpp.scheduler.utils; + +import io.hetu.core.plugin.mpp.RunningTaskHashMap; +import io.hetu.core.plugin.mpp.SynchronizedHashMap; +import io.hetu.core.plugin.mpp.scheduler.entity.ETLInfo; +import io.hetu.core.plugin.mpp.scheduler.entity.TableSchema; + +public class Const +{ + private Const() + { + } + + /** + * Thread Communication + * tableName:String, status:int + * status:0-have created hive table; 1-have finished exported data into table + */ + public static SynchronizedHashMap tableStatus = new SynchronizedHashMap<>("tableStatus"); + + public static String idSeparator = "-"; + + public static SynchronizedHashMap etlInfoMap = new SynchronizedHashMap<>("etlInfo"); + + public static RunningTaskHashMap runningThreadMap = new RunningTaskHashMap(); + + public static SynchronizedHashMap schemasMap = new SynchronizedHashMap<>(100, 10000); +} diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java new file mode 100755 index 000000000..96dade13f --- /dev/null +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java @@ -0,0 +1,118 @@ +/* + 
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

/**
 * Static helpers for the mpp scheduler: timestamps, GaussDB-to-Hive and
 * GaussDB-to-GDS column type mapping, and extraction of the target GaussDB
 * table name from a GDS-hinted SQL statement.
 */
public class Util
{
    // DateTimeFormatter is immutable and thread-safe, unlike the per-call
    // SimpleDateFormat the original allocated.
    private static final DateTimeFormatter DATE_FORMAT = DateTimeFormatter.ofPattern("dd-MM-yyyy HH:mm:ss");

    // The hint marker a query must carry for GDS routing.
    private static final String GDS_MARKER = "/**gds**/";

    private Util()
    {
    }

    /** Current local time formatted as "dd-MM-yyyy HH:mm:ss". */
    public static String getDate()
    {
        return LocalDateTime.now().format(DATE_FORMAT);
    }

    /**
     * Maps a GaussDB column type name to the Hive type used for the external
     * table. Unknown types fall back to "string". columnSize/decimalDigits are
     * accepted for signature compatibility; the Hive-side mapping does not need
     * them.
     */
    public static String getMappingHiveType(String gsType, int columnSize, int decimalDigits)
    {
        // Covers int2/int4/int8 etc. (the original used the regex "int.*").
        if (gsType.startsWith("int")) {
            return "int";
        }
        switch (gsType) {
            case "float":
                return "float";
            case "double":
            case "numeric":
                return "double";
            case "date":
                return "date";
            case "bpchar":
            case "varchar":
            default:
                return "string";
        }
    }

    /**
     * Maps a GaussDB column type name to the column definition used for the GDS
     * foreign table, preserving precision/scale where the target type carries
     * them. Unknown types fall back to "character varying(100)".
     */
    public static String getMappingGSType(String gsType, int columnSize, int decimalDigits)
    {
        if (gsType.startsWith("int")) {
            return "integer";
        }
        switch (gsType) {
            case "float":
                return "float";
            case "double":
                return "double";
            case "numeric":
                return "numeric(" + columnSize + "," + decimalDigits + ")";
            case "date":
                return "date";
            case "bpchar":
                return "character(" + columnSize + ")";
            case "varchar":
                return "character varying(" + columnSize + ")";
            default:
                return "character varying(100)";
        }
    }

    /**
     * If {@code statement} carries the GDS hint marker, returns the first
     * whitespace-delimited token of the form "gaussdb.&lt;table&gt;";
     * otherwise returns {@code null}.
     */
    public static String getGaussDBTable(String statement)
    {
        // Normalize whitespace and detach commas so "...,x" splits cleanly.
        String normalized = statement.replaceAll("[\\t\\n\\r]", " ").replace(",", " ,");
        if (!normalized.contains(GDS_MARKER)) {
            return null;
        }
        String stripped = normalized.replace(GDS_MARKER, "");
        for (String token : stripped.split(" ")) {
            if (token.startsWith("gaussdb.")) {
                return token;
            }
        }
        return null;
    }
}
--git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSplitManager.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSplitManager.java index 47fe70ac9..0dc7fa852 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSplitManager.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSplitManager.java @@ -313,7 +313,7 @@ public class HiveSplitManager return highMemorySplitSourceCounter; } - private Iterable getPartitionMetadata(ConnectorSession session, SemiTransactionalHiveMetastore metastore, Table table, SchemaTableName tableName, List hivePartitions, Optional bucketProperty) + public Iterable getPartitionMetadata(ConnectorSession session, SemiTransactionalHiveMetastore metastore, Table table, SchemaTableName tableName, List hivePartitions, Optional bucketProperty) { if (hivePartitions.isEmpty()) { return ImmutableList.of(); diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSplitSource.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSplitSource.java index 52d8a8f74..ed1a09ea5 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSplitSource.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveSplitSource.java @@ -78,7 +78,7 @@ import static java.lang.Math.toIntExact; import static java.lang.String.format; import static java.util.Objects.requireNonNull; -class HiveSplitSource +public class HiveSplitSource implements ConnectorSplitSource { private static final Logger log = Logger.get(HiveSplitSource.class); diff --git a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveVacuumSplitSource.java b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveVacuumSplitSource.java index c04267b59..d3d4fef4c 100644 --- a/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveVacuumSplitSource.java +++ b/presto-hive/src/main/java/io/prestosql/plugin/hive/HiveVacuumSplitSource.java @@ -47,7 +47,7 @@ import static java.util.concurrent.CompletableFuture.completedFuture; * 
to same worker and together, to start vacuum operation. So class wrapps all such splits together and scheduled as * single split. */ -class HiveVacuumSplitSource +public class HiveVacuumSplitSource implements ConnectorSplitSource { private HiveSplitSource splitSource; @@ -63,7 +63,7 @@ class HiveVacuumSplitSource private HdfsEnvironment hdfsEnvironment; private HdfsContext hdfsContext; - HiveVacuumSplitSource(HiveSplitSource splitSource, HiveVacuumTableHandle vacuumTableHandle, HdfsEnvironment hdfsEnvironment, HdfsContext hdfsContext, ConnectorSession session) + public HiveVacuumSplitSource(HiveSplitSource splitSource, HiveVacuumTableHandle vacuumTableHandle, HdfsEnvironment hdfsEnvironment, HdfsContext hdfsContext, ConnectorSession session) { this.splitSource = splitSource; this.vacuumTableHandle = vacuumTableHandle; diff --git a/presto-main/etc/catalog/mpp.properties b/presto-main/etc/catalog/mpp.properties new file mode 100644 index 000000000..d7d6387fa --- /dev/null +++ b/presto-main/etc/catalog/mpp.properties @@ -0,0 +1,30 @@ + +connector.name=mpp +hive.metastore.uri=thrift://localhost:9083 + +#GDS baseinfo +gds-list=gsfs://localhost:port1|base_path +aux-url=alluxio://localhost:19998 +base-aux=/gdsdata/ +etl-reuse=false + +#hive info +hive-user=username +hive-passwd=password +hive-db=xxx_db + +hive-url=jdbc:hive2://localhost:10000/ +# hive template +hsql-drop=drop table if exists ${table_name} +hsql-create=CREATE EXTERNAL TABLE ${table_name} ( ${schema_info} ) COMMENT 'gds external table' ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' LOCATION '${pipe_to_aux_base_path}' + +# gsDB connection info +gs-driver=org.postgresql.Driver +gs-url=jdbc:postgresql://localhost:25308/schema +gs-user=user +gs-passwd=password + +# gaussdb template +gsql-create=create foreign table ${gaussdb_name}.ext_${table_name} ( ${schema_info} ) SERVER gsmpp_server OPTIONS ( LOCATION '${gds_foreign_location}', FORMAT 'text', DELIMITER E',', NULL '', encoding 
'UTF-8', noescaping 'true', EOL E'\\n', out_filename_prefix '${table_name}') WRITE ONLY; +gsql-insert=insert into ${gaussdb_name}.ext_${table_name} select ${schema_info} from ${gaussdb_name}.${table_name}; +gsql-drop=drop foreign table if exists ${gaussdb_name}.ext_${table_name}; diff --git a/presto-main/etc/config.properties b/presto-main/etc/config.properties index a821a2a23..4439c46a0 100644 --- a/presto-main/etc/config.properties +++ b/presto-main/etc/config.properties @@ -55,6 +55,7 @@ plugin.bundles=\ ../presto-memory/pom.xml,\ ../presto-jmx/pom.xml,\ ../presto-hive-hadoop2/pom.xml,\ + ../hetu-mpp/pom.xml,\ ../presto-example-http/pom.xml,\ ../presto-kafka/pom.xml, \ ../presto-tpch/pom.xml, \ -- Gitee From c92778cd6546208ae280d8258638a57b15d3ade0 Mon Sep 17 00:00:00 2001 From: YijianCheng Date: Tue, 21 Jun 2022 15:09:03 +0800 Subject: [PATCH 02/30] delete unused pom dependency --- hetu-mpp/pom.xml | 42 ------------------------------------------ 1 file changed, 42 deletions(-) diff --git a/hetu-mpp/pom.xml b/hetu-mpp/pom.xml index e5549a885..ce5a0d8ab 100644 --- a/hetu-mpp/pom.xml +++ b/hetu-mpp/pom.xml @@ -126,54 +126,12 @@ provided - - - - - - - net.jodah expiringmap 0.5.9 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - io.prestosql.hadoop hadoop-apache -- Gitee From 8d2b4a967575fe7564b2a57a796ddb4f29e90d52 Mon Sep 17 00:00:00 2001 From: YijianCheng Date: Tue, 21 Jun 2022 15:19:46 +0800 Subject: [PATCH 03/30] modify copyright info --- hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java | 2 +- .../main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java | 2 +- .../main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java | 2 +- .../src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java | 2 +- .../main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java | 2 +- .../java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java | 2 +- .../io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java | 2 +- 
.../java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java | 2 +- .../io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java | 2 +- .../java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java | 2 +- .../java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java | 2 +- .../main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java | 2 +- 12 files changed, 12 insertions(+), 12 deletions(-) diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java index 901e5f578..537f1d097 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java index 3b721f6c0..ce955cfa5 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java index a0e6aa371..ecc2001ec 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java index 48272a1e1..0620393fc 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java index bdfa37b75..830601aa6 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. 
* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java index 4216415c8..52812cb53 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java index ea193f6c7..a0ec0a63b 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java index 3e343bc0d..2374d1d89 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java index 63a0c8844..3aaaf50bd 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java index c7bdc872e..762fd0653 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. 
+ * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java index 418ba9946..7c863c866 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java index 96dade13f..1a7b9af4c 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2020. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at -- Gitee From 1d5d2a3a9bea0ac399fa7e7ae8757334cbfbb126 Mon Sep 17 00:00:00 2001 From: YijianCheng Date: Tue, 21 Jun 2022 17:17:49 +0800 Subject: [PATCH 04/30] modify copyright and code style --- .../io/hetu/core/plugin/mpp/MppConfig.java | 2 +- .../io/hetu/core/plugin/mpp/MppMetadata.java | 5 ---- .../core/plugin/mpp/MppMetadataFactory.java | 9 +++--- .../hetu/core/plugin/mpp/MppSplitManager.java | 2 +- .../core/plugin/mpp/RunningTaskHashMap.java | 2 +- .../core/plugin/mpp/SynchronizedHashMap.java | 4 ++- .../hetu/core/plugin/mpp/TableMoveLock.java | 2 +- .../core/plugin/mpp/scheduler/Scheduler.java | 13 ++++---- .../plugin/mpp/scheduler/db/GsussDBOpt.java | 30 +++---------------- .../mpp/scheduler/db/GsussDBOptThread.java | 28 ++++++++--------- .../plugin/mpp/scheduler/entity/ETLInfo.java | 2 +- .../mpp/scheduler/entity/TableSchema.java | 2 +- .../plugin/mpp/scheduler/hadoop/HiveOpt.java | 6 ++-- .../plugin/mpp/scheduler/utils/Const.java | 2 +- .../core/plugin/mpp/scheduler/utils/Util.java | 18 +---------- 15 files changed, 42 insertions(+), 85 deletions(-) diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java index 537f1d097..78014abdf 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadata.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadata.java index 04b5aa155..17f7ec1df 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadata.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadata.java @@ -131,7 +131,6 @@ public class MppMetadata String schemaName = mppConfig.getHiveDb(); String tblName = tableName.getTableName(); String threadName = Const.tableStatus.getThreadName(); -// Optional
table = metastore.getTable(new HiveIdentity(session), tableName.getSchemaName(), tableName.getTableName()); Optional
table = metastore.getTable(new HiveIdentity(session), schemaName, tableName.getTableName()); String tblIdentifier = schemaName + "." + tblName; logger.info("Mpp scheduler for " + tblIdentifier + " started"); @@ -143,7 +142,6 @@ public class MppMetadata if (Const.etlInfoMap.containsKey(tblIdentifier) && Const.tableStatus.tableStatusKeysExists(tblIdentifier)) { // have etled at least onece if (mppConfig.isEtlReuse()) { -// scheduler.prepare(gdsServer, schemas, schemaName, tblName, false); logger.info("Hive(Mpp) table " + tblIdentifier + " existed and reuse it"); } else { @@ -156,9 +154,7 @@ public class MppMetadata // or have etled and still etling // or just create and etling // we can use two strategies to judge: rules and time interval -// ETLInfo etlInfo = Const.etlInfoMap.get(tblIdentifier); if (Const.tableStatus.tableStatusKeysExists(tblIdentifier)) { -// scheduler.prepare(gdsServer, schemas, schemaName, tblName, false); logger.info("[2]Hive(MPP) " + tblIdentifier + " Table is existed and is etling by others and reuse it!"); } else { @@ -168,7 +164,6 @@ public class MppMetadata } } -// table = metastore.getTable(new HiveIdentity(session), tableName.getSchemaName(), tableName.getTableName()); table = metastore.getTable(new HiveIdentity(session), schemaName, tableName.getTableName()); // we must not allow system tables due to how permissions are checked in SystemTableAwareAccessControl diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadataFactory.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadataFactory.java index b59bb24fb..ae519024d 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadataFactory.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppMetadataFactory.java @@ -76,6 +76,7 @@ public class MppMetadataFactory protected final int hmsWriteBatchSize; private Scheduler scheduler; private MppConfig mppConfig; + SemiTransactionalHiveMetastore semiTransactionalHiveMetastore; @Inject 
@SuppressWarnings("deprecation") @@ -203,7 +204,7 @@ public class MppMetadataFactory @Override public HiveMetadata get() { - SemiTransactionalHiveMetastore metastore = new SemiTransactionalHiveMetastore( + semiTransactionalHiveMetastore = new SemiTransactionalHiveMetastore( hdfsEnvironment, CachingHiveMetastore.memoizeMetastore(this.metastore, perTransactionCacheMaximumSize), // per-transaction cache renameExecution, @@ -217,7 +218,7 @@ public class MppMetadataFactory hmsWriteBatchSize); return new MppMetadata( - metastore, + semiTransactionalHiveMetastore, hdfsEnvironment, partitionManager, writesToNonManagedTablesEnabled, @@ -228,8 +229,8 @@ public class MppMetadataFactory partitionUpdateCodec, typeTranslator, prestoVersion, - new MetastoreHiveStatisticsProvider(metastore, statsCache, samplePartitionCache), - accessControlMetadataFactory.create(metastore), + new MetastoreHiveStatisticsProvider(semiTransactionalHiveMetastore, statsCache, samplePartitionCache), + accessControlMetadataFactory.create(semiTransactionalHiveMetastore), autoVacuumEnabled, vacuumDeltaNumThreshold, vacuumDeltaPercentThreshold, diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppSplitManager.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppSplitManager.java index 60be6e97b..4e5dcd956 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppSplitManager.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppSplitManager.java @@ -220,7 +220,7 @@ public class MppSplitManager logger.info("Waitting to complete GDS process transporting data to alluxio"); } catch (InterruptedException e) { - e.printStackTrace(); + logger.error(e.getMessage()); } recode = Const.tableStatus.get(tblIdentifier); } diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java index ce955cfa5..b611a2c34 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java +++ 
b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java index ecc2001ec..0adeaf5d0 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -54,8 +54,10 @@ public class SynchronizedHashMap threadHashMap = new HashMap(); tableStatusList = new ArrayList<>(); gaussDbTaskStatusMap = new HashMap<>(); + break; case ETL_INFO_MAP: hashMap = new HashMap(); + break; default: break; } diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java index 0620393fc..a200f3d31 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. 
* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java index 830601aa6..a3cae4000 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -32,6 +32,7 @@ public class Scheduler public static Logger logger = Logger.get(Scheduler.class); public MppConfig mppConfig; public Queue> gdsQueue; + private Queue> tmpQueue; @Inject public Scheduler(MppConfig mppConfig) @@ -48,11 +49,11 @@ public class Scheduler gdsServer.split("\\|"); gdsMaps.put(gdsServer.split("\\|")[0], gdsServer.split("\\|")[1]); } - Queue> gdsQueue = new LinkedList<>(); + tmpQueue = new LinkedList<>(); for (Map.Entry entry : gdsMaps.entrySet()) { - gdsQueue.add(entry); + tmpQueue.add(entry); } - return gdsQueue; + return tmpQueue; } public Map.Entry getGDS() @@ -64,7 +65,7 @@ public class Scheduler Thread.sleep(1000); } catch (InterruptedException e) { - e.printStackTrace(); + logger.error(e.getMessage()); } } else { @@ -76,8 +77,6 @@ public class Scheduler public void prepareHiveExternalTable(Map schemas, String schemaName, String tableName) { logger.info("Get schemainfo from gaussDB by table name"); -// String hivedbName = gaussdbSchema; -// String hiveSchemaInfo = schemas.get("hiveSchema"); String tblIdentifier = schemaName + "." 
+ tableName; logger.info("Create hive foreign table using alluxio path by hiveserver2 service"); diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java index 52812cb53..0c48854b5 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -25,7 +25,6 @@ import java.sql.DatabaseMetaData; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; -import java.sql.Statement; import java.util.HashMap; import java.util.Map; @@ -52,7 +51,7 @@ public class GsussDBOpt Class.forName(driver).getConstructor().newInstance(); } catch (Exception e) { - e.printStackTrace(); + logger.error(e.getMessage()); return null; } @@ -61,33 +60,12 @@ public class GsussDBOpt logger.info("GaussDB Connection succeed!"); } catch (Exception e) { - e.printStackTrace(); + logger.error(e.getMessage()); return null; } return conn; } - public static void executeSql(Connection conn, String sql) - { - Statement stmt = null; - try { - stmt = conn.createStatement(); - boolean rc = stmt.execute(sql); - stmt.close(); - } - catch (SQLException e) { - if (stmt != null) { - try { - stmt.close(); - } - catch (SQLException e1) { - e1.printStackTrace(); - } - } - e.printStackTrace(); - } - } - public static Map getSchemas(MppConfig mppConfig, String catalog, String schema, String tableName) { String tblIdentifier = catalog + "." + schema + "." 
+ tableName; @@ -142,7 +120,7 @@ public class GsussDBOpt conn.close(); } catch (SQLException e) { - e.printStackTrace(); + logger.error(e.getMessage()); } return schemas; } diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java index a0ec0a63b..28ba5502b 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at @@ -58,6 +58,7 @@ public class GsussDBOptThread String dropSQL, String createSQL, String insertSQL, String gaussdbSchema, String tableName, String hiveDb, String parentThreadName) { + super.setName("GsussDBOptThread"); this.gdsQueue = gdsQueue; this.gdsServer = gdsServer; this.driver = driver; @@ -75,23 +76,21 @@ public class GsussDBOptThread public Connection getConnection(String username, String passwd) { - String driver = this.driver; - String jdbcUrl = this.jdbcUrl; Connection conn = null; try { - Class.forName(driver).getConstructor().newInstance(); + Class.forName(this.driver).getConstructor().newInstance(); } catch (Exception e) { - e.printStackTrace(); + logger.error(e.getMessage()); return null; } try { - conn = DriverManager.getConnection(jdbcUrl, username, passwd); + conn = DriverManager.getConnection(this.jdbcUrl, username, passwd); logger.info("Connection succeed!"); } catch (Exception e) { - e.printStackTrace(); + logger.error(e.getMessage()); return null; } return conn; @@ -102,19 +101,20 @@ public class GsussDBOptThread Statement stmt = null; try { 
stmt = conn.createStatement(); - boolean rc = stmt.execute(sql); - stmt.close(); + stmt.execute(sql); } catch (SQLException e) { + logger.error(e.getMessage()); + } + finally { if (stmt != null) { try { stmt.close(); } - catch (SQLException e1) { - e1.printStackTrace(); + catch (SQLException throwables) { + logger.error(throwables.getMessage()); } } - e.printStackTrace(); } } @@ -127,12 +127,10 @@ public class GsussDBOptThread optTable(conn, dropSQL); optTable(conn, createSQL); optTable(conn, insertSQL); -// List runningThreadList = Const.runningThreadMap.get(gaussdbSchema + "." + tableName); List runningThreadList = Const.runningThreadMap.get(hiveDb + "." + tableName); synchronized (TableMoveLock.getLock(gaussdbSchema + "." + tableName)) { for (String threadName : runningThreadList) { Const.tableStatus.put(hiveDb + "." + tableName, 1, threadName); -// Const.tableStatus.put(gaussdbSchema + "." + tableName, 1, threadName); Const.runningThreadMap.removeThread(hiveDb + "." + tableName, threadName); } } @@ -143,7 +141,7 @@ public class GsussDBOptThread conn.close(); } catch (SQLException e) { - e.printStackTrace(); + logger.error(e.getMessage()); } } } diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java index 2374d1d89..9fbd31995 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java index 3aaaf50bd..80ee64371 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java index 762fd0653..cfae29fc2 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at @@ -47,10 +47,10 @@ public class HiveOpt logger.info("Finished create hive foreign table"); } catch (ClassNotFoundException e) { - e.printStackTrace(); + logger.error(e.getMessage()); } catch (SQLException e) { - e.printStackTrace(); + logger.error(e.getMessage()); } } } diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java index 7c863c866..e36ee6d06 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java index 1a7b9af4c..bd97f4416 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2022-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at @@ -99,20 +99,4 @@ public class Util } return hType; } - - public static String getGaussDBTable(String statement) - { - statement = statement.replaceAll("[\\t\\n\\r]", " ").replaceAll("\\,", " ,"); - if (Pattern.matches(".*\\/\\*\\*gds\\*\\*\\/.*", statement)) { - statement = statement.replaceAll("\\/\\*\\*gds\\*\\*\\/", ""); - - String[] tokens = statement.split(" "); - for (String token : tokens) { - if (Pattern.matches("gaussdb\\..*", token)) { - return token; - } - } - } - return null; - } } -- Gitee From 603f9e1a807bc2e084c79c28597cef260bb9b1eb Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Thu, 16 Jun 2022 21:54:29 +0800 Subject: [PATCH 05/30] update presto-Kafka for kerberos --- hetu-docs/zh/connector/kafka.md | 42 +++++ .../plugin/kafka/KafkaConnectorConfig.java | 145 ++++++++++++++++++ .../plugin/kafka/KafkaRecordSet.java | 55 +++---- .../kafka/KafkaSimpleConsumerManager.java | 51 ++++++ .../plugin/kafka/KafkaSplitManager.java | 66 ++++---- presto-main/etc/catalog/kafka.properties | 12 ++ 6 files changed, 305 insertions(+), 66 deletions(-) create mode 100644 presto-main/etc/catalog/kafka.properties diff --git a/hetu-docs/zh/connector/kafka.md b/hetu-docs/zh/connector/kafka.md index ab1f61156..dce59998d 100644 --- a/hetu-docs/zh/connector/kafka.md +++ b/hetu-docs/zh/connector/kafka.md @@ -87,6 +87,48 @@ openLooKeng必须仍然能够连接到群集的所有节点,即使这里只指 此属性是可选的;默认值为`true`。 +### `kerberos.on` + +是否开启kerberos认证,适用于开启了kerberos认证的集群。 + +此属性是可选的;默认值为`false`。 + +### `java.security.auth.login.config` + +Kafka的jaas_conf路径,也就是java认证和授权的相关文件,文件中存放的是认证和授权信息。 + +此属性是可选的;默认值为``。 + +### `java.security.krb5.conf` + +存放krb5.conf文件的路径,要注意全局配置中也需要配置此选项,例如部署后在jvm.config中配置。 + +此属性是可选的;默认值为``。 + +### `group.id` + +kafka的groupId。 + +此属性是可选的;默认值为``。 + +### `security.protocol` + +Kafka的安全协议。 + +此属性是可选的;默认值为`SASL_PLAINTEXT`。 + +### `sasl.mechanism` + +sasl机制,被用于客户端连接安全的机制。 + +此属性是可选的;默认值为`GSSAPI`。 + +### `sasl.kerberos.service.name` + 
+kafka运行时的kerberos principal name。 + +此属性是可选的;默认值为`kafka`。 + ## 内部列 对于每个已定义的表,连接器维护以下列: diff --git a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java index 4a93d5e40..ae5cbee93 100644 --- a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java +++ b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java @@ -70,6 +70,151 @@ public class KafkaConnectorConfig */ private boolean hideInternalColumns = true; + /** + * the path of krb5.conf ,used for develop + */ + private String krb5Conf; + + /** + * the path of kafka_client_jaas_conf + */ + private String loginConfig; + + /** + * whether use subject creds only + */ + private String useSubjectCredsOnly; + + /** + * the group id of kafka + */ + private String groupId; + + /** + * the security protocol of kafka + */ + private String securityProtocol; + + /** + * the sasl mechanism of kafka + */ + private String saslMechanism; + + /** + * the sasl kerberos service name of kafka + */ + private String saslKerberosServiceName; + + /** + * whether to use kerberos + */ + private boolean kerberosOn; + + public String getKrb5Conf() + { + return krb5Conf; + } + + @Mandatory(name = "java.security.krb5.conf", + description = "java.security.krb5.conf", + defaultValue = "", + required = false) + @Config("java.security.krb5.conf") + public void setKrb5Conf(String krb5Conf) + { + this.krb5Conf = krb5Conf; + } + + public String getLoginConfig() + { + return loginConfig; + } + + @Mandatory(name = "java.security.auth.login.config", + description = "java.security.auth.login.config", + defaultValue = "", + required = false) + @Config("java.security.auth.login.config") + public void setLoginConfig(String loginConfig) + { + this.loginConfig = loginConfig; + } + + public String getGroupId() + { + return groupId; + } + + @Mandatory(name = "group.id", + description = "group.id", + 
defaultValue = "", + required = false) + @Config("group.id") + public void setGroupId(String groupId) + { + this.groupId = groupId; + } + + public String getSecurityProtocol() + { + return securityProtocol; + } + + @Mandatory(name = "security.protocol", + description = "security.protocol", + defaultValue = "SASL_PLAINTEXT", + required = false) + @Config("security.protocol") + public void setSecurityProtocol(String securityProtocol) + { + this.securityProtocol = securityProtocol; + } + + public String getSaslMechanism() + { + return saslMechanism; + } + + @Mandatory(name = "sasl.mechanism", + description = "sasl.mechanism", + defaultValue = "GSSAPI", + required = false) + @Config("sasl.mechanism") + public void setSaslMechanism(String saslMechanism) + { + this.saslMechanism = saslMechanism; + } + + public String getSaslKerberosServiceName() + { + return saslKerberosServiceName; + } + + @Mandatory(name = "sasl.kerberos.service.name", + description = "sasl.kerberos.service.name", + defaultValue = "kafka", + required = false) + @Config("sasl.kerberos.service.name") + public void setSaslKerberosServiceName(String saslKerberosServiceName) + { + this.saslKerberosServiceName = saslKerberosServiceName; + } + + public boolean isKerberosOn() + { + return kerberosOn; + } + + @Mandatory(name = "kerberos.on", + description = "whether to use kerberos", + defaultValue = "false", + required = true) + @Config("kerberos.on") + public void setKerberosOn(boolean kerberosOn) + { + this.kerberosOn = kerberosOn; + } + @NotNull public File getTableDescriptionDir() { diff --git a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaRecordSet.java b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaRecordSet.java index 5b612ff71..30b1e86d1 100644 --- a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaRecordSet.java +++ b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaRecordSet.java @@ -27,11 +27,13 @@ import io.prestosql.spi.connector.RecordSet; import 
io.prestosql.spi.type.Type; import kafka.api.FetchRequest; import kafka.api.FetchRequestBuilder; -import kafka.javaapi.FetchResponse; -import kafka.javaapi.consumer.SimpleConsumer; -import kafka.message.MessageAndOffset; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.TopicPartition; import java.nio.ByteBuffer; +import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -109,9 +111,9 @@ public class KafkaRecordSet private long totalBytes; private long totalMessages; private long cursorOffset = split.getStart(); - private Iterator messageAndOffsetIterator; + private Iterator> recordIterator; private final AtomicBoolean reported = new AtomicBoolean(); - + private KafkaConsumer leaderKafkaConsumer; private final FieldValueProvider[] currentRowValues = new FieldValueProvider[columnHandles.size()]; KafkaRecordCursor() @@ -147,19 +149,19 @@ public class KafkaRecordSet // Create a fetch request openFetchRequest(); - while (messageAndOffsetIterator.hasNext()) { - MessageAndOffset currentMessageAndOffset = messageAndOffsetIterator.next(); - long messageOffset = currentMessageAndOffset.offset(); + while (recordIterator.hasNext()) { + ConsumerRecord record = recordIterator.next(); + long messageOffset = record.offset(); if (messageOffset >= split.getEnd()) { return endOfData(); // Past our split end. Bail. } if (messageOffset >= cursorOffset) { - return nextRow(currentMessageAndOffset); + return nextRow(record); } } - messageAndOffsetIterator = null; + recordIterator = null; } } @@ -173,21 +175,21 @@ public class KafkaRecordSet return false; } - private boolean nextRow(MessageAndOffset messageAndOffset) + private boolean nextRow(ConsumerRecord record) { - cursorOffset = messageAndOffset.offset() + 1; // Cursor now points to the next message. 
- totalBytes += messageAndOffset.message().payloadSize(); + cursorOffset = record.offset() + 1; // Cursor now points to the next message. + totalBytes += record.serializedValueSize(); totalMessages++; byte[] keyData = EMPTY_BYTE_ARRAY; byte[] messageData = EMPTY_BYTE_ARRAY; - ByteBuffer key = messageAndOffset.message().key(); + ByteBuffer key = record.key(); if (key != null) { keyData = new byte[key.remaining()]; key.get(keyData); } - ByteBuffer message = messageAndOffset.message().payload(); + ByteBuffer message = record.value(); if (message != null) { messageData = new byte[message.remaining()]; message.get(messageData); @@ -206,7 +208,7 @@ public class KafkaRecordSet currentRowValuesMap.put(columnHandle, longValueProvider(totalMessages)); break; case PARTITION_OFFSET_FIELD: - currentRowValuesMap.put(columnHandle, longValueProvider(messageAndOffset.offset())); + currentRowValuesMap.put(columnHandle, longValueProvider(record.offset())); break; case MESSAGE_FIELD: currentRowValuesMap.put(columnHandle, bytesValueProvider(messageData)); @@ -305,12 +307,15 @@ public class KafkaRecordSet @Override public void close() { + if (leaderKafkaConsumer != null) { + leaderKafkaConsumer.close(); + } } private void openFetchRequest() { try { - if (messageAndOffsetIterator == null) { + if (recordIterator == null) { log.debug("Fetching %d bytes from offset %d (%d - %d). %d messages read so far", KAFKA_READ_BUFFER_SIZE, cursorOffset, split.getStart(), split.getEnd(), totalMessages); FetchRequest req = new FetchRequestBuilder() .clientId("presto-worker-" + Thread.currentThread().getName()) @@ -319,16 +324,14 @@ public class KafkaRecordSet // TODO - this should look at the actual node this is running on and prefer // that copy if running locally. 
- look into NodeInfo - SimpleConsumer consumer = consumerManager.getConsumer(split.getLeader()); - - FetchResponse fetchResponse = consumer.fetch(req); - if (fetchResponse.hasError()) { - short errorCode = fetchResponse.errorCode(split.getTopicName(), split.getPartitionId()); - log.warn("Fetch response has error: %d", errorCode); - throw new RuntimeException("could not fetch data from Kafka, error code is '" + errorCode + "'"); + if (leaderKafkaConsumer == null) { + leaderKafkaConsumer = consumerManager.getSaslConsumer(split.getLeader()); } - - messageAndOffsetIterator = fetchResponse.messageSet(split.getTopicName(), split.getPartitionId()).iterator(); + TopicPartition topicPartition = new TopicPartition(split.getTopicName(), split.getPartitionId()); + leaderKafkaConsumer.assign(Collections.singletonList(topicPartition)); + leaderKafkaConsumer.seek(topicPartition, cursorOffset); + ConsumerRecords records = leaderKafkaConsumer.poll(500); + recordIterator = records.records(topicPartition).iterator(); } } catch (Exception e) { // Catch all exceptions because Kafka library is written in scala and checked exceptions are not declared in method signature. 
diff --git a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java index 1f082360b..f3c70c04c 100644 --- a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java +++ b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java @@ -20,11 +20,14 @@ import io.airlift.log.Logger; import io.prestosql.spi.HostAddress; import io.prestosql.spi.NodeManager; import kafka.javaapi.consumer.SimpleConsumer; +import org.apache.kafka.clients.consumer.KafkaConsumer; import javax.annotation.PreDestroy; import javax.inject.Inject; +import java.nio.ByteBuffer; import java.util.Map; +import java.util.Properties; import static java.lang.Math.toIntExact; import static java.util.Objects.requireNonNull; @@ -43,6 +46,14 @@ public class KafkaSimpleConsumerManager private final int connectTimeoutMillis; private final int bufferSizeBytes; + private final boolean kerberosOn; + private final String loginConfig; + private final String krb5Conf; + private final String groupId; + private final String securityProtocol; + private final String saslMechanism; + private final String saslKerberosServiceName; + @Inject public KafkaSimpleConsumerManager( KafkaConnectorConfig kafkaConnectorConfig, @@ -55,6 +66,14 @@ public class KafkaSimpleConsumerManager this.bufferSizeBytes = toIntExact(kafkaConnectorConfig.getKafkaBufferSize().toBytes()); this.consumerCache = CacheBuilder.newBuilder().build(CacheLoader.from(this::createConsumer)); + + this.kerberosOn = kafkaConnectorConfig.isKerberosOn(); + this.loginConfig = kafkaConnectorConfig.getLoginConfig(); + this.krb5Conf = kafkaConnectorConfig.getKrb5Conf(); + this.groupId = kafkaConnectorConfig.getGroupId(); + this.securityProtocol = kafkaConnectorConfig.getSecurityProtocol(); + this.saslMechanism = kafkaConnectorConfig.getSaslMechanism(); + this.saslKerberosServiceName = 
kafkaConnectorConfig.getSaslKerberosServiceName(); } @PreDestroy @@ -76,6 +95,12 @@ public class KafkaSimpleConsumerManager return consumerCache.getUnchecked(host); } + public KafkaConsumer getSaslConsumer(HostAddress host) + { + requireNonNull(host, "host is null"); + return createSaslConsumer(host); + } + private SimpleConsumer createConsumer(HostAddress host) { log.info("Creating new Consumer for %s", host); @@ -85,4 +110,30 @@ public class KafkaSimpleConsumerManager bufferSizeBytes, "presto-kafka-" + nodeManager.getCurrentNode().getNodeIdentifier()); } + + private KafkaConsumer createSaslConsumer(HostAddress host) + { + log.info("Creating new SaslConsumer for %s", host); + Properties props = new Properties(); + if (kerberosOn) { + System.setProperty("java.security.auth.login.config", loginConfig); + System.setProperty("java.security.krb5.conf", krb5Conf); + props.put("security.protocol", securityProtocol); + props.put("sasl.mechanism", saslMechanism); + props.put("sasl.kerberos.service.name", saslKerberosServiceName); + } + + try { + props.put("bootstrap.servers", host.toString()); + props.put("enable.auto.commit", "false"); + props.put("key.deserializer", Class.forName("org.apache.kafka.common.serialization.ByteBufferDeserializer")); + props.put("value.deserializer", Class.forName("org.apache.kafka.common.serialization.ByteBufferDeserializer")); + props.put("group.id", groupId); + } + catch (ClassNotFoundException e) { + e.printStackTrace(); + } + + return new KafkaConsumer<>(props); + } } diff --git a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSplitManager.java b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSplitManager.java index be74c05c3..eb480cbcc 100644 --- a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSplitManager.java +++ b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSplitManager.java @@ -28,15 +28,14 @@ import io.prestosql.spi.connector.ConnectorTableHandle; import 
io.prestosql.spi.connector.ConnectorTransactionHandle; import io.prestosql.spi.connector.FixedSplitSource; import kafka.api.PartitionOffsetRequestInfo; -import kafka.cluster.BrokerEndPoint; import kafka.common.TopicAndPartition; import kafka.javaapi.OffsetRequest; import kafka.javaapi.OffsetResponse; -import kafka.javaapi.PartitionMetadata; -import kafka.javaapi.TopicMetadata; -import kafka.javaapi.TopicMetadataRequest; -import kafka.javaapi.TopicMetadataResponse; import kafka.javaapi.consumer.SimpleConsumer; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.Node; +import org.apache.kafka.common.PartitionInfo; +import org.apache.kafka.common.TopicPartition; import javax.inject.Inject; @@ -47,6 +46,7 @@ import java.io.InputStreamReader; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; +import java.nio.ByteBuffer; import java.util.List; import java.util.Set; import java.util.concurrent.ThreadLocalRandom; @@ -84,44 +84,30 @@ public class KafkaSplitManager public ConnectorSplitSource getSplits(ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorTableHandle table, SplitSchedulingStrategy splitSchedulingStrategy) { KafkaTableHandle kafkaTableHandle = (KafkaTableHandle) table; - try { - SimpleConsumer simpleConsumer = consumerManager.getConsumer(selectRandom(nodes)); - - TopicMetadataRequest topicMetadataRequest = new TopicMetadataRequest(ImmutableList.of(kafkaTableHandle.getTopicName())); - TopicMetadataResponse topicMetadataResponse = simpleConsumer.send(topicMetadataRequest); + try (KafkaConsumer kafkaConsumer = consumerManager.getSaslConsumer(selectRandom(nodes))) { + List partitionInfos = kafkaConsumer.partitionsFor(kafkaTableHandle.getTopicName()); ImmutableList.Builder splits = ImmutableList.builder(); - for (TopicMetadata metadata : topicMetadataResponse.topicsMetadata()) { - for (PartitionMetadata part : metadata.partitionsMetadata()) { - log.debug("Adding Partition 
%s/%s", metadata.topic(), part.partitionId()); - - BrokerEndPoint leader = part.leader(); - if (leader == null) { - throw new PrestoException(GENERIC_INTERNAL_ERROR, format("Leader election in progress for Kafka topic '%s' partition %s", metadata.topic(), part.partitionId())); - } - - HostAddress partitionLeader = HostAddress.fromParts(leader.host(), leader.port()); - - SimpleConsumer leaderConsumer = consumerManager.getConsumer(partitionLeader); - // Kafka contains a reverse list of "end - start" pairs for the splits - - long[] offsets = findAllOffsets(leaderConsumer, metadata.topic(), part.partitionId()); - - for (int i = offsets.length - 1; i > 0; i--) { - KafkaSplit split = new KafkaSplit( - metadata.topic(), - kafkaTableHandle.getKeyDataFormat(), - kafkaTableHandle.getMessageDataFormat(), - kafkaTableHandle.getKeyDataSchemaLocation().map(KafkaSplitManager::readSchema), - kafkaTableHandle.getMessageDataSchemaLocation().map(KafkaSplitManager::readSchema), - part.partitionId(), - offsets[i], - offsets[i - 1], - partitionLeader); - splits.add(split); - } - } + for (PartitionInfo partitionInfo : partitionInfos) { + log.debug("Adding Partition %s/%s", partitionInfo.topic(), partitionInfo.partition()); + Node leader = partitionInfo.leader(); + HostAddress partitionLeader = HostAddress.fromParts(leader.host(), leader.port()); + TopicPartition topicPartition = new TopicPartition(partitionInfo.topic(), partitionInfo.partition()); + kafkaConsumer.assign(ImmutableList.of(topicPartition)); + long beginOffset = kafkaConsumer.beginningOffsets(ImmutableList.of(topicPartition)).values().iterator().next(); + long endOffset = kafkaConsumer.endOffsets(ImmutableList.of(topicPartition)).values().iterator().next(); + KafkaSplit split = new KafkaSplit( + topicPartition.topic(), + kafkaTableHandle.getKeyDataFormat(), + kafkaTableHandle.getMessageDataFormat(), + kafkaTableHandle.getKeyDataSchemaLocation().map(KafkaSplitManager::readSchema), + 
kafkaTableHandle.getMessageDataSchemaLocation().map(KafkaSplitManager::readSchema), + topicPartition.partition(), + beginOffset, + endOffset, + partitionLeader); + splits.add(split); } return new FixedSplitSource(splits.build()); diff --git a/presto-main/etc/catalog/kafka.properties b/presto-main/etc/catalog/kafka.properties new file mode 100644 index 000000000..eed0c8015 --- /dev/null +++ b/presto-main/etc/catalog/kafka.properties @@ -0,0 +1,12 @@ +connector.name=kafka +kafka.nodes=localhost:9092 +kafka.table-names=testTopic +kafka.hide-internal-columns=false +kerberos.on=true + +java.security.auth.login.config=/Users/mac/Desktop/kafka-jaas.conf +java.security.krb5.conf=/Users/mac/Desktop/krb5.conf +group.id=test1 +security.protocol=SASL_PLAINTEXT +sasl.mechanism=GSSAPI +sasl.kerberos.service.name=kafka -- Gitee From e38a29e5bdf92590f05e6347eaa54906d9e364ff Mon Sep 17 00:00:00 2001 From: YijianCheng Date: Tue, 21 Jun 2022 18:54:08 +0800 Subject: [PATCH 06/30] add licence and modify licence info --- .../main/java/io/hetu/core/plugin/mpp/MppConfig.java | 2 +- .../io/hetu/core/plugin/mpp/RunningTaskHashMap.java | 2 +- .../io/hetu/core/plugin/mpp/SynchronizedHashMap.java | 2 +- .../java/io/hetu/core/plugin/mpp/TableMoveLock.java | 2 +- .../io/hetu/core/plugin/mpp/scheduler/Scheduler.java | 2 +- .../core/plugin/mpp/scheduler/db/GsussDBOpt.java | 2 +- .../plugin/mpp/scheduler/db/GsussDBOptThread.java | 2 +- .../core/plugin/mpp/scheduler/entity/ETLInfo.java | 2 +- .../plugin/mpp/scheduler/entity/TableSchema.java | 2 +- .../core/plugin/mpp/scheduler/hadoop/HiveOpt.java | 2 +- .../hetu/core/plugin/mpp/scheduler/utils/Const.java | 2 +- .../hetu/core/plugin/mpp/scheduler/utils/Util.java | 2 +- pom.xml | 1 + .../license/license-header-alternate-2022-1.txt | 12 ++++++++++++ 14 files changed, 25 insertions(+), 12 deletions(-) create mode 100644 src/main/resource/license/license-header-alternate-2022-1.txt diff --git 
a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java index 78014abdf..dbf7bd4e6 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/MppConfig.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Yijian Cheng. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java index b611a2c34..c2188d885 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/RunningTaskHashMap.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Yijian Cheng. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java index 0adeaf5d0..9e437c1c6 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/SynchronizedHashMap.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Yijian Cheng. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java index a200f3d31..fec5b74f0 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/TableMoveLock.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Yijian Cheng. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java index a3cae4000..9012f20a8 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/Scheduler.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Yijian Cheng. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java index 0c48854b5..097ce0599 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOpt.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Yijian Cheng. All rights reserved. 
* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java index 28ba5502b..3fc87e14f 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/db/GsussDBOptThread.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Yijian Cheng. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java index 9fbd31995..d29aa5900 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/ETLInfo.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Yijian Cheng. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java index 80ee64371..766d6d30e 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/entity/TableSchema.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Yijian Cheng. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java index cfae29fc2..52eb26758 100644 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/hadoop/HiveOpt.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Yijian Cheng. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java index e36ee6d06..df4f94df5 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Const.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Yijian Cheng. All rights reserved. 
* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java index bd97f4416..81a24bf54 100755 --- a/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java +++ b/hetu-mpp/src/main/java/io/hetu/core/plugin/mpp/scheduler/utils/Util.java @@ -1,5 +1,5 @@ /* - * Copyright (C) 2018-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Copyright (C) 2022-2022. Yijian Cheng. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at diff --git a/pom.xml b/pom.xml index a66f294ab..3044708d4 100644 --- a/pom.xml +++ b/pom.xml @@ -1737,6 +1737,7 @@ ${air.main.basedir}/src/main/resource/license/license-header-alternate-2020.txt ${air.main.basedir}/src/main/resource/license/license-header-alternate-2021.txt ${air.main.basedir}/src/main/resource/license/license-header-alternate-2022.txt + ${air.main.basedir}/src/main/resource/license/license-header-alternate-2022-1.txt ${air.main.basedir}/src/main/resource/license/license-header-third.txt diff --git a/src/main/resource/license/license-header-alternate-2022-1.txt b/src/main/resource/license/license-header-alternate-2022-1.txt new file mode 100644 index 000000000..3c8817696 --- /dev/null +++ b/src/main/resource/license/license-header-alternate-2022-1.txt @@ -0,0 +1,12 @@ +Copyright (C) 2022-2022. Yijian Cheng. All rights reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. -- Gitee From 7798d50fabbb8bfea86d281d0338d066ab34e351 Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Tue, 21 Jun 2022 19:05:52 +0800 Subject: [PATCH 07/30] update presto-Kafka for kerberos --- hetu-docs/zh/connector/kafka.md | 26 ++++++++++++------- .../kafka/KafkaSimpleConsumerManager.java | 2 ++ 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/hetu-docs/zh/connector/kafka.md b/hetu-docs/zh/connector/kafka.md index dce59998d..985c6553a 100644 --- a/hetu-docs/zh/connector/kafka.md +++ b/hetu-docs/zh/connector/kafka.md @@ -29,16 +29,22 @@ kafka.nodes=host1:port,host2:port 配置属性包括: -| 属性名称| 说明| -|:----------|:----------| -| `kafka.table-names`| 目录提供的所有表列表| -| `kafka.default-schema`| 表的默认模式名| -| `kafka.nodes`| Kafka集群节点列表| -| `kafka.connect-timeout`| 连接Kafka集群超时| -| `kafka.buffer-size`| Kafka读缓冲区大小| -| `kafka.table-description-dir`| 包含主题描述文件的目录| -| `kafka.hide-internal-columns`| 控制内部列是否是表模式的一部分| - +| 属性名称| 说明 | +|:----------|:-----------------------------------| +| `kafka.table-names`| 目录提供的所有表列表 | +| `kafka.default-schema`| 表的默认模式名 | +| `kafka.nodes`| Kafka集群节点列表 | +| `kafka.connect-timeout`| 连接Kafka集群超时 | +| `kafka.buffer-size`| Kafka读缓冲区大小 | +| `kafka.table-description-dir`| 包含主题描述文件的目录 | +| `kafka.hide-internal-columns`| 控制内部列是否是表模式的一部分 | +| `kerberos.on`| 是否开启Kerberos认证 | +| `java.security.auth.login.config`| kafka_client_jass.conf路径 | +| `java.security.krb5.conf`| krb5.conf文件路径 | +| `group.id`| kafka的groupID | +| `security.protocol`| Kafka的安全认证协议 | +| `sasl.mechanism`| sasl机制 | +| `sasl.kerberos.service.name`| kafka服务运行时的kerberos principal name | 
### `kafka.table-names` 此目录提供的所有表的逗号分隔列表。表名可以是非限定的(简单名称),并将被放入默认模式(见下文)中,或者用模式名称(`.`)限定。 diff --git a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java index f3c70c04c..627fd2a04 100644 --- a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java +++ b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java @@ -129,6 +129,8 @@ public class KafkaSimpleConsumerManager props.put("key.deserializer", Class.forName("org.apache.kafka.common.serialization.ByteBufferDeserializer")); props.put("value.deserializer", Class.forName("org.apache.kafka.common.serialization.ByteBufferDeserializer")); props.put("group.id", groupId); + props.put("session.timeout.ms", connectTimeoutMillis); + props.put("receive.buffer.bytes", bufferSizeBytes); } catch (ClassNotFoundException e) { e.printStackTrace(); -- Gitee From 3aff9171859a1cfca3ed092f4869108e3ad1ed90 Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Thu, 23 Jun 2022 16:47:11 +0800 Subject: [PATCH 08/30] fix the null of group id --- hetu-docs/zh/connector/kafka.md | 2 +- .../plugin/kafka/KafkaConnectorConfig.java | 23 ++++++++++++------- .../kafka/KafkaSimpleConsumerManager.java | 8 +++++-- 3 files changed, 22 insertions(+), 11 deletions(-) diff --git a/hetu-docs/zh/connector/kafka.md b/hetu-docs/zh/connector/kafka.md index 985c6553a..b59d087f1 100644 --- a/hetu-docs/zh/connector/kafka.md +++ b/hetu-docs/zh/connector/kafka.md @@ -107,7 +107,7 @@ Kafka的jaas_conf路径,也就是java认证和授权的相关文件,文件 ### `java.security.krb5.conf` -存放krb5.conf文件的路径,要注意全局配置中也需要配置此选项,例如部署后在jvm.config中配置。 +存放krb5.conf文件的路径,要注意全局配置中也需要配置此选项,例如部署后在jvm.config中配置,而在开发中需要在启动PrestoServer时使用"-D"参数配置。 此属性是可选的;默认值为``。 diff --git a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java index 
ae5cbee93..1df2891ef 100644 --- a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java +++ b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java @@ -120,9 +120,10 @@ public class KafkaConnectorConfig defaultValue = "", required = false) @Config("java.security.krb5.conf") - public void setKrb5Conf(String krb5Conf) + public KafkaConnectorConfig setKrb5Conf(String krb5Conf) { this.krb5Conf = krb5Conf; + return this; } public String getLoginConfig() @@ -135,9 +136,10 @@ public class KafkaConnectorConfig defaultValue = "", required = false) @Config("java.security.auth.login.config") - public void setLoginConfig(String loginConfig) + public KafkaConnectorConfig setLoginConfig(String loginConfig) { this.loginConfig = loginConfig; + return this; } public String getGroupId() @@ -147,12 +149,13 @@ public class KafkaConnectorConfig @Mandatory(name = "group.id", description = "group.id", - defaultValue = "", + defaultValue = "test", required = false) @Config("group.id") - public void setGroupId(String groupId) + public KafkaConnectorConfig setGroupId(String groupId) { this.groupId = groupId; + return this; } public String getSecurityProtocol() @@ -165,9 +168,10 @@ public class KafkaConnectorConfig defaultValue = "SASL_PLAINTEXT", required = false) @Config("security.protocol") - public void setSecurityProtocol(String securityProtocol) + public KafkaConnectorConfig setSecurityProtocol(String securityProtocol) { this.securityProtocol = securityProtocol; + return this; } public String getSaslMechanism() @@ -180,9 +184,10 @@ public class KafkaConnectorConfig defaultValue = "GSSAPI", required = false) @Config("sasl.mechanism") - public void setSaslMechanism(String saslMechanism) + public KafkaConnectorConfig setSaslMechanism(String saslMechanism) { this.saslMechanism = saslMechanism; + return this; } public String getSaslKerberosServiceName() @@ -195,9 +200,10 @@ public class KafkaConnectorConfig defaultValue = "kafka", required = 
false) @Config("sasl.kerberos.service.name") - public void setSaslKerberosServiceName(String saslKerberosServiceName) + public KafkaConnectorConfig setSaslKerberosServiceName(String saslKerberosServiceName) { this.saslKerberosServiceName = saslKerberosServiceName; + return this; } public boolean isKerberosOn() @@ -210,9 +216,10 @@ public class KafkaConnectorConfig defaultValue = "false", required = true) @Config("kerberos.on") - public void setKerberosOn(boolean kerberosOn) + public KafkaConnectorConfig setKerberosOn(boolean kerberosOn) { this.kerberosOn = kerberosOn; + return this; } @NotNull diff --git a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java index 627fd2a04..056316072 100644 --- a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java +++ b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java @@ -28,6 +28,7 @@ import javax.inject.Inject; import java.nio.ByteBuffer; import java.util.Map; import java.util.Properties; +import java.util.UUID; import static java.lang.Math.toIntExact; import static java.util.Objects.requireNonNull; @@ -49,7 +50,7 @@ public class KafkaSimpleConsumerManager private final boolean kerberosOn; private final String loginConfig; private final String krb5Conf; - private final String groupId; + private String groupId; private final String securityProtocol; private final String saslMechanism; private final String saslKerberosServiceName; @@ -128,12 +129,15 @@ public class KafkaSimpleConsumerManager props.put("enable.auto.commit", "false"); props.put("key.deserializer", Class.forName("org.apache.kafka.common.serialization.ByteBufferDeserializer")); props.put("value.deserializer", Class.forName("org.apache.kafka.common.serialization.ByteBufferDeserializer")); + if (groupId == null) { + groupId = UUID.randomUUID().toString(); + } props.put("group.id", 
groupId); props.put("session.timeout.ms", connectTimeoutMillis); props.put("receive.buffer.bytes", bufferSizeBytes); } catch (ClassNotFoundException e) { - e.printStackTrace(); + log.error(e, "failed to create kafka consumer"); } return new KafkaConsumer<>(props); -- Gitee From 1e9ed7ae29996759e4ba4ac8cb4515db108ab993 Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Thu, 23 Jun 2022 16:53:44 +0800 Subject: [PATCH 09/30] fix the null of group id --- hetu-docs/zh/connector/kafka.md | 2 +- presto-main/etc/catalog/kafka.properties | 9 ++++----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/hetu-docs/zh/connector/kafka.md b/hetu-docs/zh/connector/kafka.md index b59d087f1..0adf0eaca 100644 --- a/hetu-docs/zh/connector/kafka.md +++ b/hetu-docs/zh/connector/kafka.md @@ -95,7 +95,7 @@ openLooKeng必须仍然能够连接到群集的所有节点,即使这里只指 ### `kerberos.on` -是否开启kerberos认证,适用于开启了kerberos认证的集群。 +是否开启kerberos认证,适用于开启了kerberos认证的集群,如果在运行presto-kafka中的测试包,请置为false,因为测试程序使用内嵌Kafka,不支持认证。 此属性是可选的;默认值为`false`。 diff --git a/presto-main/etc/catalog/kafka.properties b/presto-main/etc/catalog/kafka.properties index eed0c8015..c5025f6e6 100644 --- a/presto-main/etc/catalog/kafka.properties +++ b/presto-main/etc/catalog/kafka.properties @@ -2,11 +2,10 @@ connector.name=kafka kafka.nodes=localhost:9092 kafka.table-names=testTopic kafka.hide-internal-columns=false -kerberos.on=true - -java.security.auth.login.config=/Users/mac/Desktop/kafka-jaas.conf -java.security.krb5.conf=/Users/mac/Desktop/krb5.conf -group.id=test1 +kerberos.on=false +java.security.auth.login.config=/Users/path/kafka-jaas.conf +java.security.krb5.conf=/Users/path/krb5.conf +group.id=testTopic security.protocol=SASL_PLAINTEXT sasl.mechanism=GSSAPI sasl.kerberos.service.name=kafka -- Gitee From c2f6e746d9fcafcc1cec3e19c5712aadb328a3c0 Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Thu, 23 Jun 2022 17:56:27 +0800 Subject: [PATCH 10/30] fix the null of group id --- 
.../prestosql/plugin/kafka/TestKafkaConnectorConfig.java | 4 +++- presto-main/etc/catalog/dc.properties | 4 ---- presto-main/etc/catalog/hive.properties | 9 --------- 3 files changed, 3 insertions(+), 14 deletions(-) delete mode 100644 presto-main/etc/catalog/dc.properties delete mode 100644 presto-main/etc/catalog/hive.properties diff --git a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java index 27650796b..616f51c3d 100644 --- a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java +++ b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java @@ -31,7 +31,7 @@ public class TestKafkaConnectorConfig .setKafkaBufferSize("64kB") .setDefaultSchema("default") .setTableNames("") - .setTableDescriptionDir(new File("etc/kafka/")) + .setTableDescriptionDir(new File("etc/kafka/")).setGroupId("ccc") .setHideInternalColumns(true)); } @@ -46,6 +46,7 @@ public class TestKafkaConnectorConfig .put("kafka.connect-timeout", "1h") .put("kafka.buffer-size", "1MB") .put("kafka.hide-internal-columns", "false") + .put("group.id", "bbb") .build(); KafkaConnectorConfig expected = new KafkaConnectorConfig() @@ -55,6 +56,7 @@ public class TestKafkaConnectorConfig .setNodes("localhost:12345, localhost:23456") .setKafkaConnectTimeout("1h") .setKafkaBufferSize("1MB") + .setGroupId("aaa") .setHideInternalColumns(false); ConfigAssertions.assertFullMapping(properties, expected); diff --git a/presto-main/etc/catalog/dc.properties b/presto-main/etc/catalog/dc.properties deleted file mode 100644 index 51e333ffa..000000000 --- a/presto-main/etc/catalog/dc.properties +++ /dev/null @@ -1,4 +0,0 @@ -connector.name=dc -connection-url=http://localhost:8090 -connection-user=root -connection-password= \ No newline at end of file diff --git a/presto-main/etc/catalog/hive.properties b/presto-main/etc/catalog/hive.properties deleted file 
mode 100644 index 5f7221544..000000000 --- a/presto-main/etc/catalog/hive.properties +++ /dev/null @@ -1,9 +0,0 @@ -# -# WARNING -# ^^^^^^^ -# This configuration file is for development only and should NOT be used -# in production. For example configuration, see the Presto documentation. -# - -connector.name=hive-hadoop2 -hive.metastore.uri=thrift://localhost:9083 \ No newline at end of file -- Gitee From 6a813fac58833143e2daff6172b569a088aef1e5 Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Thu, 23 Jun 2022 18:23:37 +0800 Subject: [PATCH 11/30] add kafka test conf --- .../prestosql/plugin/kafka/TestKafkaConnectorConfig.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java index 616f51c3d..8ca69a48b 100644 --- a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java +++ b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java @@ -31,7 +31,7 @@ public class TestKafkaConnectorConfig .setKafkaBufferSize("64kB") .setDefaultSchema("default") .setTableNames("") - .setTableDescriptionDir(new File("etc/kafka/")).setGroupId("ccc") + .setTableDescriptionDir(new File("etc/kafka/")) .setHideInternalColumns(true)); } @@ -46,7 +46,6 @@ public class TestKafkaConnectorConfig .put("kafka.connect-timeout", "1h") .put("kafka.buffer-size", "1MB") .put("kafka.hide-internal-columns", "false") - .put("group.id", "bbb") .build(); KafkaConnectorConfig expected = new KafkaConnectorConfig() @@ -56,7 +55,11 @@ public class TestKafkaConnectorConfig .setNodes("localhost:12345, localhost:23456") .setKafkaConnectTimeout("1h") .setKafkaBufferSize("1MB") - .setGroupId("aaa") + .setGroupId("test") + .setKrb5Conf("/etc/krb5.conf") + .setLoginConfig("/etc/kafka_client_jaas.conf") + .setSaslKerberosServiceName("kafka") + .setSaslMechanism("GSSAPI") 
.setHideInternalColumns(false); ConfigAssertions.assertFullMapping(properties, expected); -- Gitee From 2a9ed68d2cee0dd3ef6ce9370e6380b5385ec515 Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Thu, 23 Jun 2022 18:39:07 +0800 Subject: [PATCH 12/30] add kafka test conf --- .../io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java index 8ca69a48b..c088f157d 100644 --- a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java +++ b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java @@ -32,6 +32,11 @@ public class TestKafkaConnectorConfig .setDefaultSchema("default") .setTableNames("") .setTableDescriptionDir(new File("etc/kafka/")) + .setGroupId("test") + .setKrb5Conf("/etc/krb5.conf") + .setLoginConfig("/etc/kafka_client_jaas.conf") + .setSaslKerberosServiceName("kafka") + .setSaslMechanism("GSSAPI") .setHideInternalColumns(true)); } -- Gitee From f2664c261f212fd6bb887c2d3f03a17162f46fcc Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Thu, 23 Jun 2022 18:56:50 +0800 Subject: [PATCH 13/30] add kafka test conf --- .../java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java | 1 + 1 file changed, 1 insertion(+) diff --git a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java index c088f157d..745f49926 100644 --- a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java +++ b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java @@ -51,6 +51,7 @@ public class TestKafkaConnectorConfig .put("kafka.connect-timeout", "1h") .put("kafka.buffer-size", "1MB") .put("kafka.hide-internal-columns", "false") + 
.put("group.id", "bbb") .build(); KafkaConnectorConfig expected = new KafkaConnectorConfig() -- Gitee From 7da529ee2b633806cf0969215b84ffc870d12814 Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Thu, 23 Jun 2022 20:10:02 +0800 Subject: [PATCH 14/30] add kafka test conf --- .../plugin/kafka/TestKafkaConnectorConfig.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java index 745f49926..d8b272457 100644 --- a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java +++ b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java @@ -51,7 +51,13 @@ public class TestKafkaConnectorConfig .put("kafka.connect-timeout", "1h") .put("kafka.buffer-size", "1MB") .put("kafka.hide-internal-columns", "false") - .put("group.id", "bbb") + .put("group.id", "test") + .put("java.security.auth.login.config","/etc/kafka_client_jaas.conf") + .put("java.security.krb5.conf","/etc/krb5.conf") + .put("kerberos.on","false") + .put("sasl.kerberos.service.name","kafka") + .put("sasl.mechanism","GSSAPI") + .put("security.protocol","SASL_PLAINTEXT") .build(); KafkaConnectorConfig expected = new KafkaConnectorConfig() @@ -66,6 +72,8 @@ public class TestKafkaConnectorConfig .setLoginConfig("/etc/kafka_client_jaas.conf") .setSaslKerberosServiceName("kafka") .setSaslMechanism("GSSAPI") + .setKerberosOn(false) + .setSecurityProtocol("SASL_PLAINTEXT") .setHideInternalColumns(false); ConfigAssertions.assertFullMapping(properties, expected); -- Gitee From ca4d024b248276ee360e2ba2f326899eb92cb099 Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Thu, 23 Jun 2022 20:30:27 +0800 Subject: [PATCH 15/30] add kafka test conf --- .../plugin/kafka/TestKafkaConnectorConfig.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git 
a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java index d8b272457..7ecc62fdf 100644 --- a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java +++ b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java @@ -52,12 +52,12 @@ public class TestKafkaConnectorConfig .put("kafka.buffer-size", "1MB") .put("kafka.hide-internal-columns", "false") .put("group.id", "test") - .put("java.security.auth.login.config","/etc/kafka_client_jaas.conf") - .put("java.security.krb5.conf","/etc/krb5.conf") - .put("kerberos.on","false") - .put("sasl.kerberos.service.name","kafka") - .put("sasl.mechanism","GSSAPI") - .put("security.protocol","SASL_PLAINTEXT") + .put("java.security.auth.login.config", "/etc/kafka_client_jaas.conf") + .put("java.security.krb5.conf", "/etc/krb5.conf") + .put("kerberos.on", "false") + .put("sasl.kerberos.service.name", "kafka") + .put("sasl.mechanism", "GSSAPI") + .put("security.protocol", "SASL_PLAINTEXT") .build(); KafkaConnectorConfig expected = new KafkaConnectorConfig() -- Gitee From e5817f83d79b949fddbcc5cb4250a234c16590ee Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Thu, 23 Jun 2022 21:02:32 +0800 Subject: [PATCH 16/30] add kafka test conf --- .../io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java index 7ecc62fdf..3300fa02a 100644 --- a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java +++ b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java @@ -33,6 +33,8 @@ public class TestKafkaConnectorConfig .setTableNames("") .setTableDescriptionDir(new File("etc/kafka/")) .setGroupId("test") + 
.setKerberosOn(false) + .setSecurityProtocol("SASL_PLAINTEXT") .setKrb5Conf("/etc/krb5.conf") .setLoginConfig("/etc/kafka_client_jaas.conf") .setSaslKerberosServiceName("kafka") -- Gitee From d20985a5cb429fac8aef78a1db80f5347c483932 Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Thu, 23 Jun 2022 22:46:13 +0800 Subject: [PATCH 17/30] add kafka test conf --- .../plugin/kafka/KafkaConnectorConfig.java | 6 +++--- .../plugin/kafka/KafkaSimpleConsumerManager.java | 4 ++-- .../plugin/kafka/TestKafkaConnectorConfig.java | 16 ++++++++-------- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java index 1df2891ef..ae5708ac8 100644 --- a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java +++ b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java @@ -108,7 +108,7 @@ public class KafkaConnectorConfig /** * whether to use kerberos */ - private boolean kerberosOn; + private String kerberosOn; public String getKrb5Conf() { @@ -206,7 +206,7 @@ public class KafkaConnectorConfig return this; } - public boolean isKerberosOn() + public String isKerberosOn() { return kerberosOn; } @@ -216,7 +216,7 @@ public class KafkaConnectorConfig defaultValue = "false", required = true) @Config("kerberos.on") - public KafkaConnectorConfig setKerberosOn(boolean kerberosOn) + public KafkaConnectorConfig setKerberosOn(String kerberosOn) { this.kerberosOn = kerberosOn; return this; diff --git a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java index 056316072..91b5ccc3f 100644 --- a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java +++ b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java @@ 
-47,7 +47,7 @@ public class KafkaSimpleConsumerManager private final int connectTimeoutMillis; private final int bufferSizeBytes; - private final boolean kerberosOn; + private final String kerberosOn; private final String loginConfig; private final String krb5Conf; private String groupId; @@ -116,7 +116,7 @@ public class KafkaSimpleConsumerManager { log.info("Creating new SaslConsumer for %s", host); Properties props = new Properties(); - if (kerberosOn) { + if ("true".equalsIgnoreCase(kerberosOn)) { System.setProperty("java.security.auth.login.config", loginConfig); System.setProperty("java.security.krb5.conf", krb5Conf); props.put("security.protocol", securityProtocol); diff --git a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java index 3300fa02a..ac2d7444a 100644 --- a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java +++ b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java @@ -32,13 +32,13 @@ public class TestKafkaConnectorConfig .setDefaultSchema("default") .setTableNames("") .setTableDescriptionDir(new File("etc/kafka/")) - .setGroupId("test") - .setKerberosOn(false) - .setSecurityProtocol("SASL_PLAINTEXT") - .setKrb5Conf("/etc/krb5.conf") - .setLoginConfig("/etc/kafka_client_jaas.conf") - .setSaslKerberosServiceName("kafka") - .setSaslMechanism("GSSAPI") + .setGroupId(null) + .setKerberosOn(null) + .setSecurityProtocol(null) + .setKrb5Conf(null) + .setLoginConfig(null) + .setSaslKerberosServiceName(null) + .setSaslMechanism(null) .setHideInternalColumns(true)); } @@ -74,7 +74,7 @@ public class TestKafkaConnectorConfig .setLoginConfig("/etc/kafka_client_jaas.conf") .setSaslKerberosServiceName("kafka") .setSaslMechanism("GSSAPI") - .setKerberosOn(false) + .setKerberosOn("false") .setSecurityProtocol("SASL_PLAINTEXT") .setHideInternalColumns(false); -- Gitee From 
108920521dfbaafb5c475598855fb30b1d111376 Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Thu, 30 Jun 2022 13:40:16 +0800 Subject: [PATCH 18/30] add user password for kafka --- hetu-docs/zh/connector/kafka.md | 17 ++++++++--- .../plugin/kafka/KafkaConnectorConfig.java | 28 ++++++++++++++++--- .../kafka/KafkaSimpleConsumerManager.java | 9 +++++- .../kafka/TestKafkaConnectorConfig.java | 17 +++++++++-- presto-main/etc/catalog/kafka.properties | 5 ++-- 5 files changed, 63 insertions(+), 13 deletions(-) diff --git a/hetu-docs/zh/connector/kafka.md b/hetu-docs/zh/connector/kafka.md index 0adf0eaca..35e87302e 100644 --- a/hetu-docs/zh/connector/kafka.md +++ b/hetu-docs/zh/connector/kafka.md @@ -39,7 +39,8 @@ kafka.nodes=host1:port,host2:port | `kafka.table-description-dir`| 包含主题描述文件的目录 | | `kafka.hide-internal-columns`| 控制内部列是否是表模式的一部分 | | `kerberos.on`| 是否开启Kerberos认证 | -| `java.security.auth.login.config`| kafka_client_jass.conf路径 | +| `user.password.auth.on`| 是否开启kafka用户密码认证 | +| `sasl.jaas.config`| 认证相关信息 | | `java.security.krb5.conf`| krb5.conf文件路径 | | `group.id`| kafka的groupID | | `security.protocol`| Kafka的安全认证协议 | @@ -95,13 +96,21 @@ openLooKeng必须仍然能够连接到群集的所有节点,即使这里只指 ### `kerberos.on` -是否开启kerberos认证,适用于开启了kerberos认证的集群,如果在运行presto-kafka中的测试包,请置为false,因为测试程序使用内嵌Kafka,不支持认证。 +是否开启kerberos认证,适用于开启了kerberos认证的集群,如果在运行presto-kafka中的测试包,请置为false,因为测试程序使用内嵌Kafka,不支持认证,且该项与`user.password.auth.on`仅能选择一个,若两者均为true,则`user.password.auth.on`将覆盖`kerberos.on`且会异常。 此属性是可选的;默认值为`false`。 -### `java.security.auth.login.config` +### `user.password.auth.on` -Kafka的jaas_conf路径,也就是java认证和授权的相关文件,文件中存放的是认证和授权信息。 +是否开启用户密码认证,适用于开启了用户密码认证的集群,如果在运行presto-kafka中的测试包,请置为false,因为测试程序使用内嵌Kafka,不支持认证,且该项与`kerberos.on`仅能选择一个,若两者均为true,则`user.password.auth.on`将覆盖`kerberos.on`且会异常。 +若使用用户密码认证,需要在jvm配置中增加-Djava.ext.dirs=$JAVA_HOME/jre/lib/ext:/Users/mac/apps/lib,其中/Users/mac/apps/lib下需要放置kafka-client.jar,否则会报出 loginModule not found的相关异常 +此属性是可选的;默认值为`false`。 + +### `sasl.jaas.config` + 
+Kafka的认证相关信息。 +对于kerberos认证,可能的值为:sasl.jaas.config= com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true useTicketCache=true serviceName=kafka keyTab=\"/Users/mac/Desktop/user01.keytab\" principal=\"user01@EXAMPLE.COM\"; +对于用户密码认证,可能的值为:sasl.jaas.config= org.apache.kafka.common.security.plain.PlainLoginModule required username=\"producer\" password=\"producerpwd\"; 此属性是可选的;默认值为``。 diff --git a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java index ae5708ac8..41699950b 100644 --- a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java +++ b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaConnectorConfig.java @@ -109,6 +109,26 @@ public class KafkaConnectorConfig * whether to use kerberos */ private String kerberosOn; + /** + * whether to use user and password + */ + private String userPasswordOn; + + public String getUserPasswordOn() + { + return userPasswordOn; + } + + @Mandatory(name = "user.password.auth.on", + description = "user.password.auth.on", + defaultValue = "", + required = false) + @Config("user.password.auth.on") + public KafkaConnectorConfig setUserPasswordOn(String userPasswordOn) + { + this.userPasswordOn = userPasswordOn; + return this; + } public String getKrb5Conf() { @@ -131,11 +151,11 @@ public class KafkaConnectorConfig return loginConfig; } - @Mandatory(name = "java.security.auth.login.config", - description = "java.security.auth.login.config", + @Mandatory(name = "sasl.jaas.config", + description = "sasl.jaas.config", defaultValue = "", required = false) - @Config("java.security.auth.login.config") + @Config("sasl.jaas.config") public KafkaConnectorConfig setLoginConfig(String loginConfig) { this.loginConfig = loginConfig; @@ -214,7 +234,7 @@ public class KafkaConnectorConfig @Mandatory(name = "kerberos.on", description = "whether to use kerberos", defaultValue = "false", - 
required = true) + required = false) @Config("kerberos.on") public KafkaConnectorConfig setKerberosOn(String kerberosOn) { diff --git a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java index 91b5ccc3f..1584b0fc7 100644 --- a/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java +++ b/presto-kafka/src/main/java/io/prestosql/plugin/kafka/KafkaSimpleConsumerManager.java @@ -54,6 +54,7 @@ public class KafkaSimpleConsumerManager private final String securityProtocol; private final String saslMechanism; private final String saslKerberosServiceName; + private final String userPasswordOn; @Inject public KafkaSimpleConsumerManager( @@ -69,6 +70,7 @@ public class KafkaSimpleConsumerManager this.consumerCache = CacheBuilder.newBuilder().build(CacheLoader.from(this::createConsumer)); this.kerberosOn = kafkaConnectorConfig.isKerberosOn(); + this.userPasswordOn = kafkaConnectorConfig.getUserPasswordOn(); this.loginConfig = kafkaConnectorConfig.getLoginConfig(); this.krb5Conf = kafkaConnectorConfig.getKrb5Conf(); this.groupId = kafkaConnectorConfig.getGroupId(); @@ -117,12 +119,17 @@ public class KafkaSimpleConsumerManager log.info("Creating new SaslConsumer for %s", host); Properties props = new Properties(); if ("true".equalsIgnoreCase(kerberosOn)) { - System.setProperty("java.security.auth.login.config", loginConfig); + props.put("sasl.jaas.config", loginConfig); System.setProperty("java.security.krb5.conf", krb5Conf); props.put("security.protocol", securityProtocol); props.put("sasl.mechanism", saslMechanism); props.put("sasl.kerberos.service.name", saslKerberosServiceName); } + else if ("true".equalsIgnoreCase(userPasswordOn)) { + props.put("sasl.jaas.config", loginConfig); + props.put("security.protocol", securityProtocol); + props.put("sasl.mechanism", saslMechanism); + } try { props.put("bootstrap.servers", 
host.toString()); diff --git a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java index ac2d7444a..9adc647e7 100644 --- a/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java +++ b/presto-kafka/src/test/java/io/prestosql/plugin/kafka/TestKafkaConnectorConfig.java @@ -36,6 +36,7 @@ public class TestKafkaConnectorConfig .setKerberosOn(null) .setSecurityProtocol(null) .setKrb5Conf(null) + .setUserPasswordOn(null) .setLoginConfig(null) .setSaslKerberosServiceName(null) .setSaslMechanism(null) @@ -54,9 +55,15 @@ public class TestKafkaConnectorConfig .put("kafka.buffer-size", "1MB") .put("kafka.hide-internal-columns", "false") .put("group.id", "test") - .put("java.security.auth.login.config", "/etc/kafka_client_jaas.conf") + .put("sasl.jaas.config", "com.sun.security.auth.module.Krb5LoginModule required" + + " useKeyTab=true" + + " useTicketCache=true" + + " serviceName=kafka" + + " keyTab=\"/Users/mac/Desktop/user01.keytab\"" + + " principal=\"user01@EXAMPLE.COM\";") .put("java.security.krb5.conf", "/etc/krb5.conf") .put("kerberos.on", "false") + .put("user.password.auth.on", "false") .put("sasl.kerberos.service.name", "kafka") .put("sasl.mechanism", "GSSAPI") .put("security.protocol", "SASL_PLAINTEXT") @@ -71,10 +78,16 @@ public class TestKafkaConnectorConfig .setKafkaBufferSize("1MB") .setGroupId("test") .setKrb5Conf("/etc/krb5.conf") - .setLoginConfig("/etc/kafka_client_jaas.conf") + .setLoginConfig("com.sun.security.auth.module.Krb5LoginModule required" + + " useKeyTab=true" + + " useTicketCache=true" + + " serviceName=kafka" + + " keyTab=\"/Users/mac/Desktop/user01.keytab\"" + + " principal=\"user01@EXAMPLE.COM\";") .setSaslKerberosServiceName("kafka") .setSaslMechanism("GSSAPI") .setKerberosOn("false") + .setUserPasswordOn("false") .setSecurityProtocol("SASL_PLAINTEXT") .setHideInternalColumns(false); diff --git 
a/presto-main/etc/catalog/kafka.properties b/presto-main/etc/catalog/kafka.properties index c5025f6e6..92acf6132 100644 --- a/presto-main/etc/catalog/kafka.properties +++ b/presto-main/etc/catalog/kafka.properties @@ -3,9 +3,10 @@ kafka.nodes=localhost:9092 kafka.table-names=testTopic kafka.hide-internal-columns=false kerberos.on=false -java.security.auth.login.config=/Users/path/kafka-jaas.conf -java.security.krb5.conf=/Users/path/krb5.conf +java.security.krb5.conf=/Users/mac/Desktop/krb5.conf +sasl.jaas.config= com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true useTicketCache=true serviceName=kafka keyTab=\"/Users/mac/Desktop/user01.keytab\" principal=\"user01@EXAMPLE.COM\"; group.id=testTopic security.protocol=SASL_PLAINTEXT sasl.mechanism=GSSAPI sasl.kerberos.service.name=kafka +user.password.auth.on = false \ No newline at end of file -- Gitee From 133203b5a30f87e188f87a28019f503a9a109571 Mon Sep 17 00:00:00 2001 From: Zhang Jianming Date: Sat, 2 Jul 2022 09:36:43 +0800 Subject: [PATCH 19/30] fix the error --- presto-main/etc/catalog/dc.properties | 4 ++++ presto-main/etc/catalog/hive.properties | 9 +++++++++ 2 files changed, 13 insertions(+) create mode 100644 presto-main/etc/catalog/dc.properties create mode 100644 presto-main/etc/catalog/hive.properties diff --git a/presto-main/etc/catalog/dc.properties b/presto-main/etc/catalog/dc.properties new file mode 100644 index 000000000..51e333ffa --- /dev/null +++ b/presto-main/etc/catalog/dc.properties @@ -0,0 +1,4 @@ +connector.name=dc +connection-url=http://localhost:8090 +connection-user=root +connection-password= \ No newline at end of file diff --git a/presto-main/etc/catalog/hive.properties b/presto-main/etc/catalog/hive.properties new file mode 100644 index 000000000..5f7221544 --- /dev/null +++ b/presto-main/etc/catalog/hive.properties @@ -0,0 +1,9 @@ +# +# WARNING +# ^^^^^^^ +# This configuration file is for development only and should NOT be used +# in production. 
For example configuration, see the Presto documentation. +# + +connector.name=hive-hadoop2 +hive.metastore.uri=thrift://localhost:9083 \ No newline at end of file -- Gitee From d24a4f729f5acb9200908620585c332fb0fc03ba Mon Sep 17 00:00:00 2001 From: wjswyk Date: Tue, 5 Jul 2022 19:00:57 +0800 Subject: [PATCH 20/30] webui add totalinputbytes, delete cpu usage and free memory heatmap --- .../main/resources/webapp/dist/overview.js | 2 +- .../webapp/src/overview/EchartPart.jsx | 435 +++++++++--------- 2 files changed, 222 insertions(+), 215 deletions(-) diff --git a/presto-main/src/main/resources/webapp/dist/overview.js b/presto-main/src/main/resources/webapp/dist/overview.js index ebc3c0eeb..00525e966 100644 --- a/presto-main/src/main/resources/webapp/dist/overview.js +++ b/presto-main/src/main/resources/webapp/dist/overview.js @@ -26310,7 +26310,7 @@ eval("\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/i /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _echarts = __webpack_require__(/*! echarts/lib/echarts */ \"./node_modules/echarts/lib/echarts.js\");\n\nvar _echarts2 = _interopRequireDefault(_echarts);\n\n__webpack_require__(/*! 
echarts/lib/chart/line */ \"./node_modules/echarts/lib/chart/line.js\");\n\n__webpack_require__(/*! echarts/lib/chart/treemap */ \"./node_modules/echarts/lib/chart/treemap.js\");\n\n__webpack_require__(/*! echarts/theme/royal */ \"./node_modules/echarts/theme/royal.js\");\n\n__webpack_require__(/*! echarts/lib/component/tooltip */ \"./node_modules/echarts/lib/component/tooltip.js\");\n\n__webpack_require__(/*! echarts/lib/component/title */ \"./node_modules/echarts/lib/component/title.js\");\n\nvar _OverviewActions = __webpack_require__(/*! ./OverviewActions */ \"./overview/OverviewActions.js\");\n\nvar _OverviewActions2 = _interopRequireDefault(_OverviewActions);\n\nvar _OverviewStore = __webpack_require__(/*! ./OverviewStore */ \"./overview/OverviewStore.js\");\n\nvar _OverviewStore2 = _interopRequireDefault(_OverviewStore);\n\nvar _reactSimpleMultiSelect = __webpack_require__(/*! react-simple-multi-select */ \"./node_modules/react-simple-multi-select/build/components/MultiSelect.js\");\n\nvar _reactSimpleMultiSelect2 = _interopRequireDefault(_reactSimpleMultiSelect);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nvar _lodash = __webpack_require__(/*! lodash */ \"./node_modules/lodash/lodash.js\");\n\nvar _lodash2 = _interopRequireDefault(_lodash);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? 
call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Copyright (C) 2018-2021. Huawei Technologies Co., Ltd. All rights reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar EchartPart = function (_React$Component) {\n _inherits(EchartPart, _React$Component);\n\n function EchartPart(props) {\n _classCallCheck(this, EchartPart);\n\n var _this = _possibleConstructorReturn(this, (EchartPart.__proto__ || Object.getPrototypeOf(EchartPart)).call(this, props));\n\n _this.state = {\n checkStatus: {\n checkOne: true,\n checkTwo: true,\n checkThree: true,\n checkFour: true,\n checkFive: true,\n checkSix: true,\n checkSeven: true,\n heatMapChart: true,\n cpuLoad: true,\n heatMapMemoryChart: true\n },\n itemList: [{ key: \"Cluster CPU Usage\", value: \"heatMapChart\" }, { key: \"Cluster Free Memory\", value: \"heatMapMemoryChart\" }, { key: \"Avg Cluster CPU Usage\", value: \"cpuLoad\" }, { key: \"Used Query Memory\", value: \"checkOne\" }, { key: \"Running Queries\", value: \"checkTwo\" }, 
{ key: \"Queued Queries\", value: \"checkThree\" }, { key: \"Blocked Queries\", value: \"checkFour\" }, { key: \"Active Workers\", value: \"checkFive\" }, { key: \"Avg Running Tasks\", value: \"checkSix\" }, { key: \"Avg CPU cycles per worker\", value: \"checkSeven\" }],\n selectedItemList: [{ key: \"Cluster CPU Usage\", value: \"heatMapChart\" }, { key: \"Cluster Free Memory\", value: \"heatMapMemoryChart\" }, { key: \"Avg Cluster CPU Usage\", value: \"cpuLoad\" }, { key: \"Used Query Memory\", value: \"checkOne\" }, { key: \"Running Queries\", value: \"checkTwo\" }, { key: \"Queued Queries\", value: \"checkThree\" }, { key: \"Blocked Queries\", value: \"checkFour\" }, { key: \"Active Workers\", value: \"checkFive\" }, { key: \"Avg Running Tasks\", value: \"checkSix\" }, { key: \"Avg CPU cycles per worker\", value: \"checkSeven\" }],\n chartName: ['Used Query Memory', 'Running Queries', 'Queued Queries', 'Blocked Queries', 'Active Workers', 'Avg Running Tasks', 'Avg CPU cycles per worker'],\n step: 10,\n timer: null,\n chartCpu: [],\n heatMapChart: [],\n heatMapMemoryChart: [],\n chart1: [],\n chart2: [],\n chart3: [],\n chart4: [],\n chart5: [],\n chart6: [],\n chart7: [],\n chartRef: null,\n lastRow: null,\n lastByte: null,\n lastWorker: null,\n memoryInit: false,\n unitArr: ['bytes', 'quantity', 'quantity', 'quantity', 'quantity', 'quantity', 'quantity'],\n lastRefresh: null\n };\n _this.state.chartRef = Object.keys(_this.state.checkStatus), _this._onChange = _this._onChange.bind(_this);\n _this.changeList = _this.changeList.bind(_this);\n _this.resize = _this.resize.bind(_this);\n return _this;\n }\n\n _createClass(EchartPart, [{\n key: \"resize\",\n value: function resize() {\n for (var i = 0; i < this.state.chartRef.length; i++) {\n var ref = this.refs[this.state.chartRef[i]];\n if (!ref.className) {\n var chart = _echarts2.default.init(ref);\n chart.resize({ silent: true });\n }\n }\n }\n }, {\n key: \"changeList\",\n value: function 
changeList(selectedItemList) {\n var _this2 = this;\n\n this.state.itemList.map(function (item) {\n _this2.state.checkStatus[item.value] = false;\n });\n selectedItemList.map(function (item) {\n _this2.state.checkStatus[item.value] = true;\n });\n var state = this.state;\n state.selectedItemList = selectedItemList;\n this.setState(state);\n }\n }, {\n key: \"changeState\",\n value: function changeState(name) {\n var state = this.state;\n state.checkStatus[name] = !state.checkStatus[name];\n this.setState(state);\n }\n\n //echarts\n\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.setXAxis();\n _OverviewActions2.default.getData();\n _OverviewActions2.default.getMemoryData();\n _OverviewStore2.default.listen(this._onChange);\n this.lineDatas();\n\n var win = window;\n if (win.addEventListener) {\n win.addEventListener('resize', this.resize, false);\n } else if (win.attachEvent) {\n win.attachEvent('onresize', this.resize);\n } else {\n win.onresize = this.resize;\n }\n $(window).on('resize', this.resize);\n }\n }, {\n key: \"componentWillUnmount\",\n value: function componentWillUnmount() {\n _OverviewStore2.default.unlisten(this._onChange);\n clearInterval(this.state.timer);\n }\n\n //obtained data per sec\n\n }, {\n key: \"lineDatas\",\n value: function lineDatas() {\n this.state.timer = setInterval(function () {\n _OverviewActions2.default.getData();\n _OverviewActions2.default.getMemoryData();\n }, 1000);\n }\n //refresh line\n\n }, {\n key: \"_onChange\",\n value: function _onChange(data) {\n if (data.requestNum % 2 === 0) {\n if (!this.state.memoryInit && data.memoryData) {\n // let cpuChart=echarts.init(this.refs.cpuLoad);\n // let option=cpuChart.getOption();\n // let memoryInitData=[];\n // let cpuSeries={};\n // let index = 0;\n // Object.keys(data.memoryData).map(key=>{\n // let op = Object.assign({}, option.series[index]);\n // index++;\n // op.name = key.slice(0, key.indexOf(\" \"));\n // let currentCpuData = 
[...this.delete(this.state.chartCpu), [new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]];\n // op.data = this.state.step === 10 ? currentCpuData.slice(1200) : this.state.step === 20 ? currentCpuData.slice(600) : currentCpuData;\n // op.areaStyle = {\n // shadowBlur: 10,\n // opacity: 0.1\n // };\n // op.type = 'line';\n // op.showSymbol = false;\n // memoryInitData.push(op);\n // cpuSeries[key]= currentCpuData;\n // })\n // option.series=memoryInitData;\n // option.yAxis = {max: 100, min: 0, type: \"value\"};\n // cpuChart.setOption(option);\n\n var _heatMapChart = _echarts2.default.init(this.refs.heatMapChart, \"royal\");\n _heatMapChart.setOption({\n animation: false,\n title: {\n text: 'Cluster CPU Usage',\n left: 'center',\n textStyle: {\n color: \"#767676\",\n fontSize: 16\n }\n },\n tooltip: {\n trigger: 'item',\n formatter: function formatter(params, t, cb) {\n return params.name + \" : \" + params.value + \"%\";\n }\n },\n series: [{\n type: 'treemap',\n data: this.state.heatMapChart\n }]\n });\n var _heatMapMemoryChart = _echarts2.default.init(this.refs.heatMapMemoryChart, \"royal\");\n _heatMapMemoryChart.setOption({\n animation: false,\n title: {\n text: 'Cluster Free Memory ',\n left: 'center',\n textStyle: {\n color: \"#767676\",\n fontSize: 16\n }\n },\n tooltip: {\n trigger: 'item',\n formatter: function formatter(params, t, cb) {\n return params.name + \" : \" + (0, _utils.formatDataSizeBytes)(params.value);\n }\n },\n series: [{\n type: 'treemap',\n data: this.state.heatMapMemoryChart\n }]\n });\n\n this.setState({\n memoryInit: true\n });\n }\n // else{\n // let dataCpu=this.state.chartCpu;\n // let mychart1=echarts.init(this.refs.cpuLoad);\n // let option=mychart1.getOption();\n // let memoryInitData=option.series;\n // Object.keys(data.memoryData).map(key=>{\n // let dataCpuElement = dataCpu[key];\n // if (_.isUndefined(dataCpuElement)) {\n // let op = Object.assign({}, option.series[index]);\n // 
op.name = key.slice(0, key.indexOf(\" \"));\n // op.areaStyle = {\n // shadowBlur: 10,\n // opacity: 0.1\n // };\n // op.type = 'line';\n // dataCpu[key] = [...this.delete(dataCpuElement), [new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]];\n // op.data = dataCpu[key];\n // memoryInitData.push(op);\n // }\n // else {\n // dataCpu[key] = [...this.delete(dataCpuElement), [new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]];\n // }\n // for(let i=0,len=memoryInitData.length;i= 600) {\n dataset = dataset.splice(600 - 1, dataset.length - 600 - 1);\n }\n dataset = [].concat(_toConsumableArray(dataset), [newDataPoint]);\n entry.dataset = dataset;\n var sum = 0;\n for (var i = 0; i < dataset.length; i++) {\n sum += dataset[i];\n }\n entry.value = Number((sum / dataset.length).toFixed(2));\n }\n });\n\n var heatMapDataSort = (0, _utils.bubbleSort)(heatMapData);\n this.state.heatMapChart = heatMapDataSort.slice(0, 10);\n var heatMapChart = _echarts2.default.init(this.refs.heatMapChart, \"royal\");\n var heatMapChartOption = heatMapChart.getOption();\n heatMapChartOption.series = [{\n type: \"treemap\",\n data: heatMapDataSort.slice(0, 10),\n breadcrumb: {\n show: false\n }\n }];\n heatMapChart.setOption(heatMapChartOption);\n\n //heatMap memory data\n var heatMapMemoryData = this.state.heatMapMemoryChart;\n Object.keys(data.memoryData).map(function (key) {\n var id = data.memoryData[key].id;\n var name = key;\n var index = _lodash2.default.findIndex(heatMapMemoryData, { id: id });\n var newDataPoint = 0;\n if (typeof data.memoryData[key].pools.general != \"undefined\") {\n newDataPoint += data.memoryData[key].pools.general.freeBytes;\n if (typeof data.memoryData[key].pools.reserved != \"undefined\") {\n newDataPoint += data.memoryData[key].pools.reserved.freeBytes;\n }\n }\n newDataPoint = Number(newDataPoint);\n if (index == -1) {\n var newData = {};\n newData.id = id;\n 
newData.name = name;\n newData.value = newDataPoint;\n newData.dataset = [newDataPoint];\n newData.children = [];\n heatMapMemoryData.push(newData);\n } else {\n var entry = heatMapMemoryData[index];\n var dataset = entry.dataset;\n if (dataset.length >= 600) {\n dataset = dataset.splice(600 - 1, dataset.length - 600 - 1);\n }\n dataset = [].concat(_toConsumableArray(dataset), [newDataPoint]);\n entry.dataset = dataset;\n var sum = 0;\n for (var i = 0; i < dataset.length; i++) {\n sum += dataset[i];\n }\n entry.value = Number((sum / dataset.length).toFixed(2));\n }\n });\n var heatMapMemoryDataSort = (0, _utils.bubbleSort)(heatMapMemoryData);\n this.state.heatMapMemoryChart = heatMapMemoryDataSort.slice(0, 10);\n var heatMapMemoryChart = _echarts2.default.init(this.refs.heatMapMemoryChart, \"royal\");\n var heatMapMemoryChartOption = heatMapMemoryChart.getOption();\n heatMapMemoryChartOption.series = [{\n type: \"treemap\",\n data: heatMapMemoryDataSort.slice(0, 10),\n breadcrumb: {\n show: false\n }\n }];\n heatMapMemoryChart.setOption(heatMapMemoryChartOption);\n\n var now = Date.now();\n var secondsSinceLastRefresh = this.state.lastRefresh ? (now - this.state.lastRefresh) / 1000.0 : 1;\n secondsSinceLastRefresh = secondsSinceLastRefresh < 1 ? 1 : secondsSinceLastRefresh;\n var lastWorker = this.state.lastWorker ? 
(data.lineData.totalCpuTimeSecs - this.state.lastWorker) / data.lineData.activeWorkers / secondsSinceLastRefresh : 0;\n this.setState({\n chartCpu: [].concat(_toConsumableArray(this.delete(this.state.chartCpu)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), (data.lineData.systemCpuLoad * 100).toFixed(4)]]),\n chart1: [].concat(_toConsumableArray(this.delete(this.state.chart1)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.reservedMemory]]),\n chart2: [].concat(_toConsumableArray(this.delete(this.state.chart2)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.runningQueries]]),\n chart3: [].concat(_toConsumableArray(this.delete(this.state.chart3)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.queuedQueries]]),\n chart4: [].concat(_toConsumableArray(this.delete(this.state.chart4)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.blockedQueries]]),\n chart5: [].concat(_toConsumableArray(this.delete(this.state.chart5)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.activeWorkers]]),\n chart6: [].concat(_toConsumableArray(this.delete(this.state.chart6)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.runningDrivers]]),\n chart7: [].concat(_toConsumableArray(this.delete(this.state.chart7)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), lastWorker]]),\n lastWorker: data.lineData.totalCpuTimeSecs,\n heatMapChart: this.state.heatMapChart,\n heatMapMemoryChart: this.state.heatMapMemoryChart,\n lastRefresh: now\n });\n if (!this.refs.cpuLoad.className) {\n var mychart = _echarts2.default.init(this.refs.cpuLoad);\n var option = mychart.getOption();\n option.series[0].data = this.state.step === 10 ? this.state.chartCpu.slice(1200) : this.state.step === 20 ? 
this.state.chartCpu.slice(600) : this.state.chartCpu;\n option.series[0].areaStyle = {\n color: \"#41BB04\",\n shadowBlur: 10,\n opacity: 0.1\n };\n option.series[0].lineStyle = { color: \"#137113\" };\n option.series[0].itemStyle = { color: \"#137113\" };\n option.yAxis = { max: 100, min: 0, type: \"value\" };\n mychart.setOption(option);\n }\n for (var i = 0; i < this.state.chartName.length; i++) {\n if (!this.refs[this.state.chartRef[i]].className) {\n var _mychart = _echarts2.default.init(this.refs[this.state.chartRef[i]]);\n var _option = _mychart.getOption();\n _option.series[0].data = this.state.step === 10 ? this.state['chart' + parseInt(i + 1)].slice(1200) : this.state.step === 20 ? this.state['chart' + parseInt(i + 1)].slice(600) : this.state['chart' + parseInt(i + 1)];\n _option.series[0].areaStyle = {\n color: \"#c3c683\",\n shadowBlur: 10,\n opacity: 0.1\n };\n _option.series[0].lineStyle = { color: \"#b6a019\" };\n _option.series[0].itemStyle = { color: \"#b6a019\" };\n _mychart.setOption(_option);\n }\n }\n }\n }\n\n // delete first data\n\n }, {\n key: \"delete\",\n value: function _delete(arr) {\n if (_lodash2.default.isUndefined(arr)) {\n return [];\n }\n arr.splice(0, 1);\n return arr;\n }\n //according to step to set XAxis data\n\n }, {\n key: \"setXAxis\",\n value: function setXAxis() {\n var arr = [];\n for (var i = 0, len = 30 * 60; i < len; i++) {\n arr[i] = [new Date(new Date().getTime() - 1000 * i).format('yyyy-MM-dd hh:mm:ss'), 0];\n }\n arr = arr.reverse();\n this.setState({\n chartCpu: [].concat(_toConsumableArray(arr)),\n chart1: [].concat(_toConsumableArray(arr)),\n chart2: [].concat(_toConsumableArray(arr)),\n chart3: [].concat(_toConsumableArray(arr)),\n chart4: [].concat(_toConsumableArray(arr)),\n chart5: [].concat(_toConsumableArray(arr)),\n chart6: [].concat(_toConsumableArray(arr)),\n chart7: [].concat(_toConsumableArray(arr))\n });\n var mychart1 = _echarts2.default.init(this.refs.cpuLoad);\n mychart1.setOption({\n animation: 
false,\n title: { text: 'Average Cluster CPU Usage',\n left: 'center',\n textStyle: {\n color: \"#767676\",\n fontSize: 16\n }\n },\n tooltip: {\n trigger: 'axis'\n },\n xAxis: {\n type: 'time',\n name: 'time',\n interval: 60 * 1000 * this.state.step / 10,\n boundaryGap: false,\n axisLabel: {\n formatter: function formatter(value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n var date = new Date(value).format(\"yyyy-MM-dd hh:mm:ss\");\n return date.slice(11, 16);\n }\n }\n },\n yAxis: {\n name: 'usage(%)',\n axisTick: {\n show: false\n },\n axisLabel: {\n formatter: function formatter(value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n return value;\n }\n }\n },\n series: [{\n type: 'line',\n symbol: 'none',\n data: []\n }]\n });\n for (var _i = 0; _i < this.state.chartName.length; _i++) {\n if (!this.refs[this.state.chartRef[_i]].className) {\n var mychart = _echarts2.default.init(this.refs[this.state.chartRef[_i]]);\n mychart.setOption({\n animation: false,\n title: {\n text: this.state.chartName[_i],\n left: 'center',\n textStyle: {\n color: \"#767676\",\n fontSize: 16\n }\n },\n tooltip: {\n trigger: 'axis'\n },\n xAxis: {\n type: 'time',\n name: 'time',\n interval: 60 * 1000 * this.state.step / 10,\n boundaryGap: false,\n axisLabel: {\n formatter: function formatter(value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n var date = new Date(value).format(\"yyyy-MM-dd hh:mm:ss\");\n return date.slice(11, 16);\n }\n }\n },\n yAxis: {\n name: this.state.unitArr[_i],\n axisTick: {\n show: false\n },\n axisLabel: {\n formatter: function (name, value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n if (name === 'quantity') {\n return (0, _utils.formatCount)(value);\n } else if (name === 'bytes') {\n return (0, _utils.formatDataSizeBytes)(value);\n } else {\n return value;\n }\n }.bind(null, this.state.unitArr[_i])\n }\n },\n series: [{\n type: 'line',\n symbol: 'none',\n data: this.state.step === 10 ? 
this.state['chart' + parseInt(_i + 1)].slice(1200) : this.state.step === 20 ? this.state['chart' + parseInt(_i + 1)].slice(600) : this.state['chart' + parseInt(_i + 1)]\n }]\n });\n }\n }\n }\n }, {\n key: \"selected\",\n value: function selected(e) {\n clearInterval(this.state.timer);\n e.preventDefault();\n var val = e.target.selectedIndex === 0 ? 10 : e.target.selectedIndex === 1 ? 20 : 30;\n var state = this.state;\n state.step = val;\n this.setState(state);\n for (var i = 0; i < this.state.chartName.length; i++) {\n if (!this.refs[this.state.chartRef[i]].className) {\n var mychart = _echarts2.default.init(this.refs[this.state.chartRef[i]]);\n var _option2 = mychart.getOption();\n _option2.xAxis[0].interval = 60 * 1000 * this.state.step / 10;\n // option.series[0].data=[];\n mychart.setOption(_option2);\n }\n }\n var mychart1 = _echarts2.default.init(this.refs.cpuLoad);\n var option = mychart1.getOption();\n option.xAxis[0].interval = 60 * 1000 * this.state.step / 10;\n mychart1.setOption(option);\n _OverviewActions2.default.getData();\n this.lineDatas();\n }\n }, {\n key: \"render\",\n value: function render() {\n var _this3 = this;\n\n var style = { height: \"30vh\", width: \"calc(40vw - 80px)\", left: \"center\", top: \"center\" };\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"selectItemContainer\" },\n _react2.default.createElement(\n \"div\",\n { className: \"selectChart multiSelect\" },\n _react2.default.createElement(_reactSimpleMultiSelect2.default, {\n title: \"Select Chart\",\n itemList: this.state.itemList,\n selectedItemList: this.state.selectedItemList,\n changeList: this.changeList,\n isObjectArray: true\n })\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"select-part\" },\n _react2.default.createElement(\n \"select\",\n { onChange: this.selected.bind(this), value: this.state.step },\n _react2.default.createElement(\n \"option\",\n { value: \"10\" },\n 
\"Last 10 minutes\"\n ),\n _react2.default.createElement(\n \"option\",\n { value: \"20\" },\n \"Last 20 minutes\"\n ),\n _react2.default.createElement(\n \"option\",\n { value: \"30\" },\n \"Last 30 minutes\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"overviewGraphContainerParent\" },\n _react2.default.createElement(\n \"div\",\n { className: \"overviewGraphContainer\" },\n _react2.default.createElement(\n \"div\",\n { className: this.state.checkStatus[\"heatMapChart\"] ? 'overviewChart' : 'display-none' },\n _react2.default.createElement(\"div\", { ref: \"heatMapChart\", style: style })\n ),\n _react2.default.createElement(\n \"div\",\n { className: this.state.checkStatus[\"heatMapMemoryChart\"] ? 'overviewChart' : 'display-none' },\n _react2.default.createElement(\"div\", { ref: \"heatMapMemoryChart\", style: style })\n ),\n _react2.default.createElement(\n \"div\",\n { className: this.state.checkStatus[\"cpuLoad\"] ? 'overviewChart' : 'display-none' },\n _react2.default.createElement(\"div\", { ref: \"cpuLoad\", style: style })\n ),\n Object.keys(this.state.checkStatus).map(function (key, index) {\n if (key == 'cpuLoad' || key == 'heatMapChart' || key == 'heatMapMemoryChart') {\n return null;\n }\n return _react2.default.createElement(\n \"div\",\n { className: _this3.state.checkStatus[key] ? 
'overviewChart' : 'display-none', key: index },\n _react2.default.createElement(\"div\", { ref: key, style: style })\n );\n })\n )\n )\n );\n }\n }]);\n\n return EchartPart;\n}(_react2.default.Component);\n\nexports.default = EchartPart;\n\n//# sourceURL=webpack:///./overview/EchartPart.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _echarts = __webpack_require__(/*! echarts/lib/echarts */ \"./node_modules/echarts/lib/echarts.js\");\n\nvar _echarts2 = _interopRequireDefault(_echarts);\n\n__webpack_require__(/*! echarts/lib/chart/line */ \"./node_modules/echarts/lib/chart/line.js\");\n\n__webpack_require__(/*! echarts/lib/chart/treemap */ \"./node_modules/echarts/lib/chart/treemap.js\");\n\n__webpack_require__(/*! echarts/theme/royal */ \"./node_modules/echarts/theme/royal.js\");\n\n__webpack_require__(/*! echarts/lib/component/tooltip */ \"./node_modules/echarts/lib/component/tooltip.js\");\n\n__webpack_require__(/*! echarts/lib/component/title */ \"./node_modules/echarts/lib/component/title.js\");\n\nvar _OverviewActions = __webpack_require__(/*! ./OverviewActions */ \"./overview/OverviewActions.js\");\n\nvar _OverviewActions2 = _interopRequireDefault(_OverviewActions);\n\nvar _OverviewStore = __webpack_require__(/*! 
./OverviewStore */ \"./overview/OverviewStore.js\");\n\nvar _OverviewStore2 = _interopRequireDefault(_OverviewStore);\n\nvar _reactSimpleMultiSelect = __webpack_require__(/*! react-simple-multi-select */ \"./node_modules/react-simple-multi-select/build/components/MultiSelect.js\");\n\nvar _reactSimpleMultiSelect2 = _interopRequireDefault(_reactSimpleMultiSelect);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nvar _lodash = __webpack_require__(/*! lodash */ \"./node_modules/lodash/lodash.js\");\n\nvar _lodash2 = _interopRequireDefault(_lodash);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Copyright (C) 2018-2021. Huawei Technologies Co., Ltd. 
All rights reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar EchartPart = function (_React$Component) {\n _inherits(EchartPart, _React$Component);\n\n function EchartPart(props) {\n _classCallCheck(this, EchartPart);\n\n var _this = _possibleConstructorReturn(this, (EchartPart.__proto__ || Object.getPrototypeOf(EchartPart)).call(this, props));\n\n _this.state = {\n checkStatus: {\n checkOne: true,\n checkTwo: true,\n checkThree: true,\n checkFour: true,\n checkFive: true,\n checkSix: true,\n checkSeven: true,\n checkEight: true,\n // heatMapChart: true,\n cpuLoad: true\n // heatMapMemoryChart: true\n },\n itemList: [\n // {key: \"Cluster CPU Usage\", value: \"heatMapChart\"},\n // {key: \"Cluster Free Memory\", value: \"heatMapMemoryChart\"},\n { key: \"Avg Cluster CPU Usage\", value: \"cpuLoad\" }, { key: \"Used Query Memory\", value: \"checkOne\" }, { key: \"Running Queries\", value: \"checkTwo\" }, { key: \"Queued Queries\", value: \"checkThree\" }, { key: \"Blocked Queries\", value: \"checkFour\" }, { key: \"Active Workers\", value: \"checkFive\" }, { key: \"Avg Running Tasks\", value: \"checkSix\" }, { key: \"Avg CPU cycles per worker\", value: \"checkSeven\" }, { key: \"Input Total Bytes\", value: \"checkEight\" }],\n selectedItemList: [\n // {key: \"Cluster CPU Usage\", value: \"heatMapChart\"},\n // {key: \"Cluster Free Memory\", value: \"heatMapMemoryChart\"},\n { key: \"Avg Cluster CPU Usage\", value: \"cpuLoad\" }, { key: 
\"Used Query Memory\", value: \"checkOne\" }, { key: \"Running Queries\", value: \"checkTwo\" }, { key: \"Queued Queries\", value: \"checkThree\" }, { key: \"Blocked Queries\", value: \"checkFour\" }, { key: \"Active Workers\", value: \"checkFive\" }, { key: \"Avg Running Tasks\", value: \"checkSix\" }, { key: \"Avg CPU cycles per worker\", value: \"checkSeven\" }, { key: \"total Input Bytes\", value: \"checkEight\" }],\n chartName: ['Used Query Memory', 'Running Queries', 'Queued Queries', 'Blocked Queries', 'Active Workers', 'Avg Running Tasks', 'Avg CPU cycles per worker', 'total Input Bytes'],\n step: 10,\n timer: null,\n chartCpu: [],\n // heatMapChart: [],\n // heatMapMemoryChart: [],\n chart1: [],\n chart2: [],\n chart3: [],\n chart4: [],\n chart5: [],\n chart6: [],\n chart7: [],\n chart8: [],\n chartRef: null,\n lastRow: null,\n lastByte: null,\n lastWorker: null,\n memoryInit: false,\n unitArr: ['bytes', 'quantity', 'quantity', 'quantity', 'quantity', 'quantity', 'quantity', 'bytes'],\n lastRefresh: null\n };\n _this.state.chartRef = Object.keys(_this.state.checkStatus), _this._onChange = _this._onChange.bind(_this);\n _this.changeList = _this.changeList.bind(_this);\n _this.resize = _this.resize.bind(_this);\n return _this;\n }\n\n _createClass(EchartPart, [{\n key: \"resize\",\n value: function resize() {\n for (var i = 0; i < this.state.chartRef.length; i++) {\n var ref = this.refs[this.state.chartRef[i]];\n if (!ref.className) {\n var chart = _echarts2.default.init(ref);\n chart.resize({ silent: true });\n }\n }\n }\n }, {\n key: \"changeList\",\n value: function changeList(selectedItemList) {\n var _this2 = this;\n\n this.state.itemList.map(function (item) {\n _this2.state.checkStatus[item.value] = false;\n });\n selectedItemList.map(function (item) {\n _this2.state.checkStatus[item.value] = true;\n });\n var state = this.state;\n state.selectedItemList = selectedItemList;\n this.setState(state);\n }\n }, {\n key: \"changeState\",\n value: function 
changeState(name) {\n var state = this.state;\n state.checkStatus[name] = !state.checkStatus[name];\n this.setState(state);\n }\n\n //echarts\n\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.setXAxis();\n _OverviewActions2.default.getData();\n _OverviewActions2.default.getMemoryData();\n _OverviewStore2.default.listen(this._onChange);\n this.lineDatas();\n\n var win = window;\n if (win.addEventListener) {\n win.addEventListener('resize', this.resize, false);\n } else if (win.attachEvent) {\n win.attachEvent('onresize', this.resize);\n } else {\n win.onresize = this.resize;\n }\n $(window).on('resize', this.resize);\n }\n }, {\n key: \"componentWillUnmount\",\n value: function componentWillUnmount() {\n _OverviewStore2.default.unlisten(this._onChange);\n clearInterval(this.state.timer);\n }\n\n //obtained data per sec\n\n }, {\n key: \"lineDatas\",\n value: function lineDatas() {\n this.state.timer = setInterval(function () {\n _OverviewActions2.default.getData();\n _OverviewActions2.default.getMemoryData();\n }, 1000);\n }\n //refresh line\n\n }, {\n key: \"_onChange\",\n value: function _onChange(data) {\n if (data.requestNum % 2 === 0) {\n if (!this.state.memoryInit && data.memoryData) {\n // let cpuChart=echarts.init(this.refs.cpuLoad);\n // let option=cpuChart.getOption();\n // let memoryInitData=[];\n // let cpuSeries={};\n // let index = 0;\n // Object.keys(data.memoryData).map(key=>{\n // let op = Object.assign({}, option.series[index]);\n // index++;\n // op.name = key.slice(0, key.indexOf(\" \"));\n // let currentCpuData = [...this.delete(this.state.chartCpu), [new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]];\n // op.data = this.state.step === 10 ? currentCpuData.slice(1200) : this.state.step === 20 ? 
currentCpuData.slice(600) : currentCpuData;\n // op.areaStyle = {\n // shadowBlur: 10,\n // opacity: 0.1\n // };\n // op.type = 'line';\n // op.showSymbol = false;\n // memoryInitData.push(op);\n // cpuSeries[key]= currentCpuData;\n // })\n // option.series=memoryInitData;\n // option.yAxis = {max: 100, min: 0, type: \"value\"};\n // cpuChart.setOption(option);\n\n // let heatMapChart = echarts.init(this.refs.heatMapChart, \"royal\");\n // heatMapChart.setOption({\n // animation: false,\n // title: {\n // text: 'Cluster CPU Usage',\n // left: 'center',\n // textStyle: {\n // color: \"#767676\",\n // fontSize: 16\n // }\n // },\n // tooltip:{\n // trigger:'item',\n // formatter: function (params, t, cb) {\n // return params.name + \" : \" + params.value+\"%\";\n // }\n // },\n // series: [{\n // type: 'treemap',\n // data: this.state.heatMapChart\n // }]\n // })\n // let heatMapMemoryChart = echarts.init(this.refs.heatMapMemoryChart, \"royal\");\n // heatMapMemoryChart.setOption({\n // animation: false,\n // title: {\n // text: 'Cluster Free Memory ',\n // left: 'center',\n // textStyle: {\n // color: \"#767676\",\n // fontSize: 16\n // }\n // },\n // tooltip:{\n // trigger:'item',\n // formatter: function (params, t, cb) {\n // return params.name + \" : \" + formatDataSizeBytes(params.value);\n // }\n // },\n // series: [{\n // type: 'treemap',\n // data: this.state.heatMapMemoryChart\n // }]\n // })\n\n this.setState({\n memoryInit: true\n });\n }\n // else{\n // let dataCpu=this.state.chartCpu;\n // let mychart1=echarts.init(this.refs.cpuLoad);\n // let option=mychart1.getOption();\n // let memoryInitData=option.series;\n // Object.keys(data.memoryData).map(key=>{\n // let dataCpuElement = dataCpu[key];\n // if (_.isUndefined(dataCpuElement)) {\n // let op = Object.assign({}, option.series[index]);\n // op.name = key.slice(0, key.indexOf(\" \"));\n // op.areaStyle = {\n // shadowBlur: 10,\n // opacity: 0.1\n // };\n // op.type = 'line';\n // dataCpu[key] = 
[...this.delete(dataCpuElement), [new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]];\n // op.data = dataCpu[key];\n // memoryInitData.push(op);\n // }\n // else {\n // dataCpu[key] = [...this.delete(dataCpuElement), [new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]];\n // }\n // for(let i=0,len=memoryInitData.length;i {\n // let id = data.memoryData[key].id;\n // let name = key;\n // let index = _.findIndex(heatMapData, {id: id});\n // let newDataPoint = Number((data.memoryData[key].systemCpuLoad * 100).toFixed(2));\n // if (index == -1) {\n // let newData = {};\n // newData.id = id;\n // newData.name = name;\n // newData.value = newDataPoint\n // newData.dataset = [newDataPoint];\n // newData.children = [];\n // heatMapData.push(newData);\n // }\n // else {\n // let entry = heatMapData[index];\n // let dataset = entry.dataset;\n // if (dataset.length >= 600) {\n // dataset = dataset.splice(600 - 1, dataset.length - 600 - 1);\n // }\n // dataset = [...dataset, newDataPoint]\n // entry.dataset = dataset;\n // let sum = 0;\n // for (let i = 0; i < dataset.length; i++) {\n // sum += dataset[i];\n // }\n // entry.value = Number((sum / dataset.length).toFixed(2));\n // }\n // });\n //\n // let heatMapDataSort = bubbleSort(heatMapData);\n // this.state.heatMapChart = heatMapDataSort.slice(0,10);\n // let heatMapChart = echarts.init(this.refs.heatMapChart, \"royal\");\n // let heatMapChartOption = heatMapChart.getOption();\n // heatMapChartOption.series = [{\n // type: \"treemap\",\n // data: heatMapDataSort.slice(0,10),\n // breadcrumb: {\n // show: false\n // }\n // }];\n // heatMapChart.setOption(heatMapChartOption);\n //\n // //heatMap memory data\n // let heatMapMemoryData = this.state.heatMapMemoryChart;\n // Object.keys(data.memoryData).map(key => {\n // let id = data.memoryData[key].id;\n // let name = key;\n // let index = _.findIndex(heatMapMemoryData, {id: id});\n 
// let newDataPoint = 0;\n // if (typeof (data.memoryData[key].pools.general) != \"undefined\"){\n // newDataPoint += data.memoryData[key].pools.general.freeBytes;\n // if (typeof (data.memoryData[key].pools.reserved) != \"undefined\"){\n // newDataPoint += data.memoryData[key].pools.reserved.freeBytes;\n // }\n // }\n // newDataPoint = Number(newDataPoint);\n // if (index == -1) {\n // let newData = {};\n // newData.id = id;\n // newData.name = name;\n // newData.value = newDataPoint\n // newData.dataset = [newDataPoint];\n // newData.children = [];\n // heatMapMemoryData.push(newData);\n // }\n // else {\n // let entry = heatMapMemoryData[index];\n // let dataset = entry.dataset;\n // if (dataset.length >= 600) {\n // dataset = dataset.splice(600 - 1, dataset.length - 600 - 1);\n // }\n // dataset = [...dataset, newDataPoint]\n // entry.dataset = dataset;\n // let sum = 0;\n // for (let i = 0; i < dataset.length; i++) {\n // sum += dataset[i];\n // }\n // entry.value = Number((sum / dataset.length).toFixed(2));\n // }\n // });\n // let heatMapMemoryDataSort = bubbleSort(heatMapMemoryData);\n // this.state.heatMapMemoryChart = heatMapMemoryDataSort.slice(0,10);\n // let heatMapMemoryChart = echarts.init(this.refs.heatMapMemoryChart, \"royal\");\n // let heatMapMemoryChartOption = heatMapMemoryChart.getOption();\n // heatMapMemoryChartOption.series = [{\n // type: \"treemap\",\n // data: heatMapMemoryDataSort.slice(0,10),\n // breadcrumb: {\n // show: false\n // },\n // }];\n // heatMapMemoryChart.setOption(heatMapMemoryChartOption);\n\n var now = Date.now();\n var secondsSinceLastRefresh = this.state.lastRefresh ? (now - this.state.lastRefresh) / 1000.0 : 1;\n secondsSinceLastRefresh = secondsSinceLastRefresh < 1 ? 1 : secondsSinceLastRefresh;\n var lastWorker = this.state.lastWorker ? 
(data.lineData.totalCpuTimeSecs - this.state.lastWorker) / data.lineData.activeWorkers / secondsSinceLastRefresh : 0;\n this.setState({\n chartCpu: [].concat(_toConsumableArray(this.delete(this.state.chartCpu)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), (data.lineData.systemCpuLoad * 100).toFixed(4)]]),\n chart1: [].concat(_toConsumableArray(this.delete(this.state.chart1)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.reservedMemory]]),\n chart2: [].concat(_toConsumableArray(this.delete(this.state.chart2)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.runningQueries]]),\n chart3: [].concat(_toConsumableArray(this.delete(this.state.chart3)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.queuedQueries]]),\n chart4: [].concat(_toConsumableArray(this.delete(this.state.chart4)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.blockedQueries]]),\n chart5: [].concat(_toConsumableArray(this.delete(this.state.chart5)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.activeWorkers]]),\n chart6: [].concat(_toConsumableArray(this.delete(this.state.chart6)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.runningDrivers]]),\n chart7: [].concat(_toConsumableArray(this.delete(this.state.chart7)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), lastWorker]]),\n chart8: [].concat(_toConsumableArray(this.delete(this.state.chart8)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.totalInputBytes]]),\n lastWorker: data.lineData.totalCpuTimeSecs,\n // heatMapChart: this.state.heatMapChart,\n // heatMapMemoryChart: this.state.heatMapMemoryChart,\n lastRefresh: now\n });\n if (!this.refs.cpuLoad.className) {\n var mychart = _echarts2.default.init(this.refs.cpuLoad);\n var option = mychart.getOption();\n option.series[0].data = this.state.step === 10 ? this.state.chartCpu.slice(1200) : this.state.step === 20 ? 
this.state.chartCpu.slice(600) : this.state.chartCpu;\n option.series[0].areaStyle = {\n color: \"#41BB04\",\n shadowBlur: 10,\n opacity: 0.1\n };\n option.series[0].lineStyle = { color: \"#137113\" };\n option.series[0].itemStyle = { color: \"#137113\" };\n option.yAxis = { max: 100, min: 0, type: \"value\" };\n mychart.setOption(option);\n }\n for (var i = 0; i < this.state.chartName.length; i++) {\n if (!this.refs[this.state.chartRef[i]].className) {\n var _mychart = _echarts2.default.init(this.refs[this.state.chartRef[i]]);\n var _option = _mychart.getOption();\n _option.series[0].data = this.state.step === 10 ? this.state['chart' + parseInt(i + 1)].slice(1200) : this.state.step === 20 ? this.state['chart' + parseInt(i + 1)].slice(600) : this.state['chart' + parseInt(i + 1)];\n _option.series[0].areaStyle = {\n color: \"#c3c683\",\n shadowBlur: 10,\n opacity: 0.1\n };\n _option.series[0].lineStyle = { color: \"#b6a019\" };\n _option.series[0].itemStyle = { color: \"#b6a019\" };\n _mychart.setOption(_option);\n }\n }\n }\n }\n\n // delete first data\n\n }, {\n key: \"delete\",\n value: function _delete(arr) {\n if (_lodash2.default.isUndefined(arr)) {\n return [];\n }\n arr.splice(0, 1);\n return arr;\n }\n //according to step to set XAxis data\n\n }, {\n key: \"setXAxis\",\n value: function setXAxis() {\n var arr = [];\n for (var i = 0, len = 30 * 60; i < len; i++) {\n arr[i] = [new Date(new Date().getTime() - 1000 * i).format('yyyy-MM-dd hh:mm:ss'), 0];\n }\n arr = arr.reverse();\n this.setState({\n chartCpu: [].concat(_toConsumableArray(arr)),\n chart1: [].concat(_toConsumableArray(arr)),\n chart2: [].concat(_toConsumableArray(arr)),\n chart3: [].concat(_toConsumableArray(arr)),\n chart4: [].concat(_toConsumableArray(arr)),\n chart5: [].concat(_toConsumableArray(arr)),\n chart6: [].concat(_toConsumableArray(arr)),\n chart7: [].concat(_toConsumableArray(arr)),\n chart8: [].concat(_toConsumableArray(arr))\n });\n var mychart1 = 
_echarts2.default.init(this.refs.cpuLoad);\n mychart1.setOption({\n animation: false,\n title: { text: 'Average Cluster CPU Usage',\n left: 'center',\n textStyle: {\n color: \"#767676\",\n fontSize: 16\n }\n },\n tooltip: {\n trigger: 'axis'\n },\n xAxis: {\n type: 'time',\n name: 'time',\n interval: 60 * 1000 * this.state.step / 10,\n boundaryGap: false,\n axisLabel: {\n formatter: function formatter(value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n var date = new Date(value).format(\"yyyy-MM-dd hh:mm:ss\");\n return date.slice(11, 16);\n }\n }\n },\n yAxis: {\n name: 'usage(%)',\n axisTick: {\n show: false\n },\n axisLabel: {\n formatter: function formatter(value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n return value;\n }\n }\n },\n series: [{\n type: 'line',\n symbol: 'none',\n data: []\n }]\n });\n for (var _i = 0; _i < this.state.chartName.length; _i++) {\n if (!this.refs[this.state.chartRef[_i]].className) {\n var mychart = _echarts2.default.init(this.refs[this.state.chartRef[_i]]);\n mychart.setOption({\n animation: false,\n title: {\n text: this.state.chartName[_i],\n left: 'center',\n textStyle: {\n color: \"#767676\",\n fontSize: 16\n }\n },\n tooltip: {\n trigger: 'axis'\n },\n xAxis: {\n type: 'time',\n name: 'time',\n interval: 60 * 1000 * this.state.step / 10,\n boundaryGap: false,\n axisLabel: {\n formatter: function formatter(value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n var date = new Date(value).format(\"yyyy-MM-dd hh:mm:ss\");\n return date.slice(11, 16);\n }\n }\n },\n yAxis: {\n name: this.state.unitArr[_i],\n axisTick: {\n show: false\n },\n axisLabel: {\n formatter: function (name, value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n if (name === 'quantity') {\n return (0, _utils.formatCount)(value);\n } else if (name === 'bytes') {\n return (0, _utils.formatDataSizeBytes)(value);\n } else {\n return value;\n }\n }.bind(null, this.state.unitArr[_i])\n }\n },\n series: [{\n type: 'line',\n symbol: 
'none',\n data: this.state.step === 10 ? this.state['chart' + parseInt(_i + 1)].slice(1200) : this.state.step === 20 ? this.state['chart' + parseInt(_i + 1)].slice(600) : this.state['chart' + parseInt(_i + 1)]\n }]\n });\n }\n }\n }\n }, {\n key: \"selected\",\n value: function selected(e) {\n clearInterval(this.state.timer);\n e.preventDefault();\n var val = e.target.selectedIndex === 0 ? 10 : e.target.selectedIndex === 1 ? 20 : 30;\n var state = this.state;\n state.step = val;\n this.setState(state);\n for (var i = 0; i < this.state.chartName.length; i++) {\n if (!this.refs[this.state.chartRef[i]].className) {\n var mychart = _echarts2.default.init(this.refs[this.state.chartRef[i]]);\n var _option2 = mychart.getOption();\n _option2.xAxis[0].interval = 60 * 1000 * this.state.step / 10;\n // option.series[0].data=[];\n mychart.setOption(_option2);\n }\n }\n var mychart1 = _echarts2.default.init(this.refs.cpuLoad);\n var option = mychart1.getOption();\n option.xAxis[0].interval = 60 * 1000 * this.state.step / 10;\n mychart1.setOption(option);\n _OverviewActions2.default.getData();\n this.lineDatas();\n }\n }, {\n key: \"render\",\n value: function render() {\n var _this3 = this;\n\n var style = { height: \"30vh\", width: \"calc(40vw - 80px)\", left: \"center\", top: \"center\" };\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"selectItemContainer\" },\n _react2.default.createElement(\n \"div\",\n { className: \"selectChart multiSelect\" },\n _react2.default.createElement(_reactSimpleMultiSelect2.default, {\n title: \"Select Chart\",\n itemList: this.state.itemList,\n selectedItemList: this.state.selectedItemList,\n changeList: this.changeList,\n isObjectArray: true\n })\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"select-part\" },\n _react2.default.createElement(\n \"select\",\n { onChange: this.selected.bind(this), value: this.state.step },\n 
_react2.default.createElement(\n \"option\",\n { value: \"10\" },\n \"Last 10 minutes\"\n ),\n _react2.default.createElement(\n \"option\",\n { value: \"20\" },\n \"Last 20 minutes\"\n ),\n _react2.default.createElement(\n \"option\",\n { value: \"30\" },\n \"Last 30 minutes\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"overviewGraphContainerParent\" },\n _react2.default.createElement(\n \"div\",\n { className: \"overviewGraphContainer\" },\n _react2.default.createElement(\n \"div\",\n { className: this.state.checkStatus[\"cpuLoad\"] ? 'overviewChart' : 'display-none' },\n _react2.default.createElement(\"div\", { ref: \"cpuLoad\", style: style })\n ),\n Object.keys(this.state.checkStatus).map(function (key, index) {\n if (key == 'cpuLoad') {\n return null;\n }\n return _react2.default.createElement(\n \"div\",\n { className: _this3.state.checkStatus[key] ? 'overviewChart' : 'display-none', key: index },\n _react2.default.createElement(\"div\", { ref: key, style: style })\n );\n })\n )\n )\n );\n }\n }]);\n\n return EchartPart;\n}(_react2.default.Component);\n\nexports.default = EchartPart;\n\n//# sourceURL=webpack:///./overview/EchartPart.jsx?"); /***/ }), diff --git a/presto-main/src/main/resources/webapp/src/overview/EchartPart.jsx b/presto-main/src/main/resources/webapp/src/overview/EchartPart.jsx index 882802d92..4a5c953f5 100644 --- a/presto-main/src/main/resources/webapp/src/overview/EchartPart.jsx +++ b/presto-main/src/main/resources/webapp/src/overview/EchartPart.jsx @@ -37,13 +37,14 @@ class EchartPart extends React.Component{ checkFive:true, checkSix:true, checkSeven:true, - heatMapChart: true, - cpuLoad: true, - heatMapMemoryChart: true + checkEight:true, + // heatMapChart: true, + cpuLoad: true + // heatMapMemoryChart: true }, itemList: [ - {key: "Cluster CPU Usage", value: "heatMapChart"}, - {key: "Cluster Free Memory", value: "heatMapMemoryChart"}, + // {key: "Cluster CPU Usage", value: "heatMapChart"}, + // {key: 
"Cluster Free Memory", value: "heatMapMemoryChart"}, {key: "Avg Cluster CPU Usage", value: "cpuLoad"}, {key: "Used Query Memory", value: "checkOne"}, {key: "Running Queries", value: "checkTwo"}, @@ -51,11 +52,12 @@ class EchartPart extends React.Component{ {key: "Blocked Queries", value: "checkFour"}, {key: "Active Workers", value: "checkFive"}, {key: "Avg Running Tasks", value: "checkSix"}, - {key: "Avg CPU cycles per worker", value: "checkSeven"} - ], + {key: "Avg CPU cycles per worker", value: "checkSeven"}, + {key: "Input Total Bytes", value: "checkEight"} + ], selectedItemList: [ - {key: "Cluster CPU Usage", value: "heatMapChart"}, - {key: "Cluster Free Memory", value: "heatMapMemoryChart"}, + // {key: "Cluster CPU Usage", value: "heatMapChart"}, + // {key: "Cluster Free Memory", value: "heatMapMemoryChart"}, {key: "Avg Cluster CPU Usage", value: "cpuLoad"}, {key: "Used Query Memory", value: "checkOne"}, {key: "Running Queries", value: "checkTwo"}, @@ -63,14 +65,15 @@ class EchartPart extends React.Component{ {key: "Blocked Queries", value: "checkFour"}, {key: "Active Workers", value: "checkFive"}, {key: "Avg Running Tasks", value: "checkSix"}, - {key: "Avg CPU cycles per worker", value: "checkSeven"} - ], - chartName:['Used Query Memory', 'Running Queries', 'Queued Queries', 'Blocked Queries', 'Active Workers', 'Avg Running Tasks', 'Avg CPU cycles per worker'], + {key: "Avg CPU cycles per worker", value: "checkSeven"}, + {key: "total Input Bytes", value: "checkEight"} + ], + chartName:['Used Query Memory', 'Running Queries', 'Queued Queries', 'Blocked Queries', 'Active Workers', 'Avg Running Tasks', 'Avg CPU cycles per worker','total Input Bytes'], step:10, timer:null, chartCpu:[], - heatMapChart: [], - heatMapMemoryChart: [], + // heatMapChart: [], + // heatMapMemoryChart: [], chart1:[], chart2:[], chart3:[], @@ -78,16 +81,17 @@ class EchartPart extends React.Component{ chart5:[], chart6:[], chart7:[], + chart8:[], chartRef:null, lastRow:null, lastByte:null, 
lastWorker:null, memoryInit:false, - unitArr:['bytes','quantity','quantity','quantity','quantity','quantity','quantity'], + unitArr:['bytes','quantity','quantity','quantity','quantity','quantity','quantity','bytes'], lastRefresh: null }; this.state.chartRef = Object.keys(this.state.checkStatus), - this._onChange=this._onChange.bind(this); + this._onChange=this._onChange.bind(this); this.changeList = this.changeList.bind(this); this.resize = this.resize.bind(this); } @@ -151,74 +155,74 @@ class EchartPart extends React.Component{ _onChange(data){ if(data.requestNum%2===0){ if(!this.state.memoryInit && data.memoryData){ - // let cpuChart=echarts.init(this.refs.cpuLoad); - // let option=cpuChart.getOption(); - // let memoryInitData=[]; - // let cpuSeries={}; - // let index = 0; - // Object.keys(data.memoryData).map(key=>{ - // let op = Object.assign({}, option.series[index]); - // index++; - // op.name = key.slice(0, key.indexOf(" ")); - // let currentCpuData = [...this.delete(this.state.chartCpu), [new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]]; - // op.data = this.state.step === 10 ? currentCpuData.slice(1200) : this.state.step === 20 ? 
currentCpuData.slice(600) : currentCpuData; - // op.areaStyle = { - // shadowBlur: 10, - // opacity: 0.1 - // }; - // op.type = 'line'; - // op.showSymbol = false; - // memoryInitData.push(op); - // cpuSeries[key]= currentCpuData; - // }) - // option.series=memoryInitData; - // option.yAxis = {max: 100, min: 0, type: "value"}; - // cpuChart.setOption(option); + // let cpuChart=echarts.init(this.refs.cpuLoad); + // let option=cpuChart.getOption(); + // let memoryInitData=[]; + // let cpuSeries={}; + // let index = 0; + // Object.keys(data.memoryData).map(key=>{ + // let op = Object.assign({}, option.series[index]); + // index++; + // op.name = key.slice(0, key.indexOf(" ")); + // let currentCpuData = [...this.delete(this.state.chartCpu), [new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]]; + // op.data = this.state.step === 10 ? currentCpuData.slice(1200) : this.state.step === 20 ? currentCpuData.slice(600) : currentCpuData; + // op.areaStyle = { + // shadowBlur: 10, + // opacity: 0.1 + // }; + // op.type = 'line'; + // op.showSymbol = false; + // memoryInitData.push(op); + // cpuSeries[key]= currentCpuData; + // }) + // option.series=memoryInitData; + // option.yAxis = {max: 100, min: 0, type: "value"}; + // cpuChart.setOption(option); - let heatMapChart = echarts.init(this.refs.heatMapChart, "royal"); - heatMapChart.setOption({ - animation: false, - title: { - text: 'Cluster CPU Usage', - left: 'center', - textStyle: { - color: "#767676", - fontSize: 16 - } - }, - tooltip:{ - trigger:'item', - formatter: function (params, t, cb) { - return params.name + " : " + params.value+"%"; - } - }, - series: [{ - type: 'treemap', - data: this.state.heatMapChart - }] - }) - let heatMapMemoryChart = echarts.init(this.refs.heatMapMemoryChart, "royal"); - heatMapMemoryChart.setOption({ - animation: false, - title: { - text: 'Cluster Free Memory ', - left: 'center', - textStyle: { - color: "#767676", - fontSize: 16 - } - }, - 
tooltip:{ - trigger:'item', - formatter: function (params, t, cb) { - return params.name + " : " + formatDataSizeBytes(params.value); - } - }, - series: [{ - type: 'treemap', - data: this.state.heatMapMemoryChart - }] - }) + // let heatMapChart = echarts.init(this.refs.heatMapChart, "royal"); + // heatMapChart.setOption({ + // animation: false, + // title: { + // text: 'Cluster CPU Usage', + // left: 'center', + // textStyle: { + // color: "#767676", + // fontSize: 16 + // } + // }, + // tooltip:{ + // trigger:'item', + // formatter: function (params, t, cb) { + // return params.name + " : " + params.value+"%"; + // } + // }, + // series: [{ + // type: 'treemap', + // data: this.state.heatMapChart + // }] + // }) + // let heatMapMemoryChart = echarts.init(this.refs.heatMapMemoryChart, "royal"); + // heatMapMemoryChart.setOption({ + // animation: false, + // title: { + // text: 'Cluster Free Memory ', + // left: 'center', + // textStyle: { + // color: "#767676", + // fontSize: 16 + // } + // }, + // tooltip:{ + // trigger:'item', + // formatter: function (params, t, cb) { + // return params.name + " : " + formatDataSizeBytes(params.value); + // } + // }, + // series: [{ + // type: 'treemap', + // data: this.state.heatMapMemoryChart + // }] + // }) this.setState({ memoryInit:true @@ -259,100 +263,100 @@ class EchartPart extends React.Component{ // }) // } //heatMap data - let heatMapData = this.state.heatMapChart; - Object.keys(data.memoryData).map(key => { - let id = data.memoryData[key].id; - let name = key; - let index = _.findIndex(heatMapData, {id: id}); - let newDataPoint = Number((data.memoryData[key].systemCpuLoad * 100).toFixed(2)); - if (index == -1) { - let newData = {}; - newData.id = id; - newData.name = name; - newData.value = newDataPoint - newData.dataset = [newDataPoint]; - newData.children = []; - heatMapData.push(newData); - } - else { - let entry = heatMapData[index]; - let dataset = entry.dataset; - if (dataset.length >= 600) { - dataset = 
dataset.splice(600 - 1, dataset.length - 600 - 1); - } - dataset = [...dataset, newDataPoint] - entry.dataset = dataset; - let sum = 0; - for (let i = 0; i < dataset.length; i++) { - sum += dataset[i]; - } - entry.value = Number((sum / dataset.length).toFixed(2)); - } - }); - - let heatMapDataSort = bubbleSort(heatMapData); - this.state.heatMapChart = heatMapDataSort.slice(0,10); - let heatMapChart = echarts.init(this.refs.heatMapChart, "royal"); - let heatMapChartOption = heatMapChart.getOption(); - heatMapChartOption.series = [{ - type: "treemap", - data: heatMapDataSort.slice(0,10), - breadcrumb: { - show: false - } - }]; - heatMapChart.setOption(heatMapChartOption); - - //heatMap memory data - let heatMapMemoryData = this.state.heatMapMemoryChart; - Object.keys(data.memoryData).map(key => { - let id = data.memoryData[key].id; - let name = key; - let index = _.findIndex(heatMapMemoryData, {id: id}); - let newDataPoint = 0; - if (typeof (data.memoryData[key].pools.general) != "undefined"){ - newDataPoint += data.memoryData[key].pools.general.freeBytes; - if (typeof (data.memoryData[key].pools.reserved) != "undefined"){ - newDataPoint += data.memoryData[key].pools.reserved.freeBytes; - } - } - newDataPoint = Number(newDataPoint); - if (index == -1) { - let newData = {}; - newData.id = id; - newData.name = name; - newData.value = newDataPoint - newData.dataset = [newDataPoint]; - newData.children = []; - heatMapMemoryData.push(newData); - } - else { - let entry = heatMapMemoryData[index]; - let dataset = entry.dataset; - if (dataset.length >= 600) { - dataset = dataset.splice(600 - 1, dataset.length - 600 - 1); - } - dataset = [...dataset, newDataPoint] - entry.dataset = dataset; - let sum = 0; - for (let i = 0; i < dataset.length; i++) { - sum += dataset[i]; - } - entry.value = Number((sum / dataset.length).toFixed(2)); - } - }); - let heatMapMemoryDataSort = bubbleSort(heatMapMemoryData); - this.state.heatMapMemoryChart = heatMapMemoryDataSort.slice(0,10); - let 
heatMapMemoryChart = echarts.init(this.refs.heatMapMemoryChart, "royal"); - let heatMapMemoryChartOption = heatMapMemoryChart.getOption(); - heatMapMemoryChartOption.series = [{ - type: "treemap", - data: heatMapMemoryDataSort.slice(0,10), - breadcrumb: { - show: false - }, - }]; - heatMapMemoryChart.setOption(heatMapMemoryChartOption); + // let heatMapData = this.state.heatMapChart; + // Object.keys(data.memoryData).map(key => { + // let id = data.memoryData[key].id; + // let name = key; + // let index = _.findIndex(heatMapData, {id: id}); + // let newDataPoint = Number((data.memoryData[key].systemCpuLoad * 100).toFixed(2)); + // if (index == -1) { + // let newData = {}; + // newData.id = id; + // newData.name = name; + // newData.value = newDataPoint + // newData.dataset = [newDataPoint]; + // newData.children = []; + // heatMapData.push(newData); + // } + // else { + // let entry = heatMapData[index]; + // let dataset = entry.dataset; + // if (dataset.length >= 600) { + // dataset = dataset.splice(600 - 1, dataset.length - 600 - 1); + // } + // dataset = [...dataset, newDataPoint] + // entry.dataset = dataset; + // let sum = 0; + // for (let i = 0; i < dataset.length; i++) { + // sum += dataset[i]; + // } + // entry.value = Number((sum / dataset.length).toFixed(2)); + // } + // }); + // + // let heatMapDataSort = bubbleSort(heatMapData); + // this.state.heatMapChart = heatMapDataSort.slice(0,10); + // let heatMapChart = echarts.init(this.refs.heatMapChart, "royal"); + // let heatMapChartOption = heatMapChart.getOption(); + // heatMapChartOption.series = [{ + // type: "treemap", + // data: heatMapDataSort.slice(0,10), + // breadcrumb: { + // show: false + // } + // }]; + // heatMapChart.setOption(heatMapChartOption); + // + // //heatMap memory data + // let heatMapMemoryData = this.state.heatMapMemoryChart; + // Object.keys(data.memoryData).map(key => { + // let id = data.memoryData[key].id; + // let name = key; + // let index = _.findIndex(heatMapMemoryData, 
{id: id}); + // let newDataPoint = 0; + // if (typeof (data.memoryData[key].pools.general) != "undefined"){ + // newDataPoint += data.memoryData[key].pools.general.freeBytes; + // if (typeof (data.memoryData[key].pools.reserved) != "undefined"){ + // newDataPoint += data.memoryData[key].pools.reserved.freeBytes; + // } + // } + // newDataPoint = Number(newDataPoint); + // if (index == -1) { + // let newData = {}; + // newData.id = id; + // newData.name = name; + // newData.value = newDataPoint + // newData.dataset = [newDataPoint]; + // newData.children = []; + // heatMapMemoryData.push(newData); + // } + // else { + // let entry = heatMapMemoryData[index]; + // let dataset = entry.dataset; + // if (dataset.length >= 600) { + // dataset = dataset.splice(600 - 1, dataset.length - 600 - 1); + // } + // dataset = [...dataset, newDataPoint] + // entry.dataset = dataset; + // let sum = 0; + // for (let i = 0; i < dataset.length; i++) { + // sum += dataset[i]; + // } + // entry.value = Number((sum / dataset.length).toFixed(2)); + // } + // }); + // let heatMapMemoryDataSort = bubbleSort(heatMapMemoryData); + // this.state.heatMapMemoryChart = heatMapMemoryDataSort.slice(0,10); + // let heatMapMemoryChart = echarts.init(this.refs.heatMapMemoryChart, "royal"); + // let heatMapMemoryChartOption = heatMapMemoryChart.getOption(); + // heatMapMemoryChartOption.series = [{ + // type: "treemap", + // data: heatMapMemoryDataSort.slice(0,10), + // breadcrumb: { + // show: false + // }, + // }]; + // heatMapMemoryChart.setOption(heatMapMemoryChartOption); let now = Date.now(); let secondsSinceLastRefresh = this.state.lastRefresh ? 
(now - this.state.lastRefresh) / 1000.0 : 1; @@ -367,9 +371,11 @@ class EchartPart extends React.Component{ chart5:[...this.delete(this.state.chart5),[new Date().format('yyyy-MM-dd hh:mm:ss'),data.lineData.activeWorkers]], chart6:[...this.delete(this.state.chart6),[new Date().format('yyyy-MM-dd hh:mm:ss'),data.lineData.runningDrivers]], chart7:[...this.delete(this.state.chart7),[new Date().format('yyyy-MM-dd hh:mm:ss'),lastWorker]], + chart8:[...this.delete(this.state.chart8),[new Date().format('yyyy-MM-dd hh:mm:ss'),data.lineData.totalInputBytes + ]], lastWorker:data.lineData.totalCpuTimeSecs, - heatMapChart: this.state.heatMapChart, - heatMapMemoryChart: this.state.heatMapMemoryChart, + // heatMapChart: this.state.heatMapChart, + // heatMapMemoryChart: this.state.heatMapMemoryChart, lastRefresh: now }); if (!this.refs.cpuLoad.className) { @@ -428,6 +434,7 @@ class EchartPart extends React.Component{ chart5:[...arr], chart6:[...arr], chart7:[...arr], + chart8:[...arr], }); let mychart1=echarts.init(this.refs.cpuLoad); mychart1.setOption({ @@ -483,13 +490,13 @@ class EchartPart extends React.Component{ mychart.setOption({ animation: false, title:{ - text:this.state.chartName[i], - left:'center', + text:this.state.chartName[i], + left:'center', textStyle: { color: "#767676", fontSize: 16 - } - }, + } + }, tooltip:{ trigger:'axis' }, @@ -569,45 +576,45 @@ class EchartPart extends React.Component{ return(
-
- + -
-
- -
-
-
-
-
-
+ />
-
-
+
+
-
-
-
- {Object.keys(this.state.checkStatus).map((key, index) => { - if (key == 'cpuLoad' || key == 'heatMapChart' || key == 'heatMapMemoryChart') { - return null; - } - return ( -
-
-
- ) - })}
+
+
+ {/*
*/} + {/*
*/} + {/*
*/} + {/*
*/} + {/*
*/} + {/*
*/} +
+
+
+ {Object.keys(this.state.checkStatus).map((key, index) => { + if (key == 'cpuLoad' ) { + return null; + } + return ( +
+
+
+ ) + })} +
-- Gitee From 3fc72e845ab3d2471ffe1036c6e36b6f2714bf71 Mon Sep 17 00:00:00 2001 From: YijianCheng Date: Tue, 12 Jul 2022 10:09:16 +0800 Subject: [PATCH 21/30] modify version --- hetu-mpp/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/hetu-mpp/pom.xml b/hetu-mpp/pom.xml index ce5a0d8ab..673a13d21 100644 --- a/hetu-mpp/pom.xml +++ b/hetu-mpp/pom.xml @@ -7,7 +7,7 @@ io.hetu.core presto-root - 1.7.0-SNAPSHOT + 1.8.0-SNAPSHOT hetu-mpp -- Gitee From e94236600982322a3e09cb00810be40a8ea8acd6 Mon Sep 17 00:00:00 2001 From: wjswyk Date: Tue, 12 Jul 2022 12:19:28 +0800 Subject: [PATCH 22/30] Remove unnecessary comments --- .../main/resources/webapp/dist/overview.js | 2 +- .../webapp/src/overview/EchartPart.jsx | 216 ------------------ 2 files changed, 1 insertion(+), 217 deletions(-) diff --git a/presto-main/src/main/resources/webapp/dist/overview.js b/presto-main/src/main/resources/webapp/dist/overview.js index 00525e966..92d5c592e 100644 --- a/presto-main/src/main/resources/webapp/dist/overview.js +++ b/presto-main/src/main/resources/webapp/dist/overview.js @@ -26310,7 +26310,7 @@ eval("\n\nvar _react = __webpack_require__(/*! react */ \"./node_modules/react/i /***/ (function(module, exports, __webpack_require__) { "use strict"; -eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! 
react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _echarts = __webpack_require__(/*! echarts/lib/echarts */ \"./node_modules/echarts/lib/echarts.js\");\n\nvar _echarts2 = _interopRequireDefault(_echarts);\n\n__webpack_require__(/*! echarts/lib/chart/line */ \"./node_modules/echarts/lib/chart/line.js\");\n\n__webpack_require__(/*! echarts/lib/chart/treemap */ \"./node_modules/echarts/lib/chart/treemap.js\");\n\n__webpack_require__(/*! echarts/theme/royal */ \"./node_modules/echarts/theme/royal.js\");\n\n__webpack_require__(/*! echarts/lib/component/tooltip */ \"./node_modules/echarts/lib/component/tooltip.js\");\n\n__webpack_require__(/*! echarts/lib/component/title */ \"./node_modules/echarts/lib/component/title.js\");\n\nvar _OverviewActions = __webpack_require__(/*! ./OverviewActions */ \"./overview/OverviewActions.js\");\n\nvar _OverviewActions2 = _interopRequireDefault(_OverviewActions);\n\nvar _OverviewStore = __webpack_require__(/*! ./OverviewStore */ \"./overview/OverviewStore.js\");\n\nvar _OverviewStore2 = _interopRequireDefault(_OverviewStore);\n\nvar _reactSimpleMultiSelect = __webpack_require__(/*! react-simple-multi-select */ \"./node_modules/react-simple-multi-select/build/components/MultiSelect.js\");\n\nvar _reactSimpleMultiSelect2 = _interopRequireDefault(_reactSimpleMultiSelect);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nvar _lodash = __webpack_require__(/*! lodash */ \"./node_modules/lodash/lodash.js\");\n\nvar _lodash2 = _interopRequireDefault(_lodash);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Copyright (C) 2018-2021. Huawei Technologies Co., Ltd. 
All rights reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar EchartPart = function (_React$Component) {\n _inherits(EchartPart, _React$Component);\n\n function EchartPart(props) {\n _classCallCheck(this, EchartPart);\n\n var _this = _possibleConstructorReturn(this, (EchartPart.__proto__ || Object.getPrototypeOf(EchartPart)).call(this, props));\n\n _this.state = {\n checkStatus: {\n checkOne: true,\n checkTwo: true,\n checkThree: true,\n checkFour: true,\n checkFive: true,\n checkSix: true,\n checkSeven: true,\n checkEight: true,\n // heatMapChart: true,\n cpuLoad: true\n // heatMapMemoryChart: true\n },\n itemList: [\n // {key: \"Cluster CPU Usage\", value: \"heatMapChart\"},\n // {key: \"Cluster Free Memory\", value: \"heatMapMemoryChart\"},\n { key: \"Avg Cluster CPU Usage\", value: \"cpuLoad\" }, { key: \"Used Query Memory\", value: \"checkOne\" }, { key: \"Running Queries\", value: \"checkTwo\" }, { key: \"Queued Queries\", value: \"checkThree\" }, { key: \"Blocked Queries\", value: \"checkFour\" }, { key: \"Active Workers\", value: \"checkFive\" }, { key: \"Avg Running Tasks\", value: \"checkSix\" }, { key: \"Avg CPU cycles per worker\", value: \"checkSeven\" }, { key: \"Input Total Bytes\", value: \"checkEight\" }],\n selectedItemList: [\n // {key: \"Cluster CPU Usage\", value: \"heatMapChart\"},\n // {key: \"Cluster Free Memory\", value: \"heatMapMemoryChart\"},\n { key: \"Avg Cluster CPU Usage\", value: \"cpuLoad\" }, { key: 
\"Used Query Memory\", value: \"checkOne\" }, { key: \"Running Queries\", value: \"checkTwo\" }, { key: \"Queued Queries\", value: \"checkThree\" }, { key: \"Blocked Queries\", value: \"checkFour\" }, { key: \"Active Workers\", value: \"checkFive\" }, { key: \"Avg Running Tasks\", value: \"checkSix\" }, { key: \"Avg CPU cycles per worker\", value: \"checkSeven\" }, { key: \"total Input Bytes\", value: \"checkEight\" }],\n chartName: ['Used Query Memory', 'Running Queries', 'Queued Queries', 'Blocked Queries', 'Active Workers', 'Avg Running Tasks', 'Avg CPU cycles per worker', 'total Input Bytes'],\n step: 10,\n timer: null,\n chartCpu: [],\n // heatMapChart: [],\n // heatMapMemoryChart: [],\n chart1: [],\n chart2: [],\n chart3: [],\n chart4: [],\n chart5: [],\n chart6: [],\n chart7: [],\n chart8: [],\n chartRef: null,\n lastRow: null,\n lastByte: null,\n lastWorker: null,\n memoryInit: false,\n unitArr: ['bytes', 'quantity', 'quantity', 'quantity', 'quantity', 'quantity', 'quantity', 'bytes'],\n lastRefresh: null\n };\n _this.state.chartRef = Object.keys(_this.state.checkStatus), _this._onChange = _this._onChange.bind(_this);\n _this.changeList = _this.changeList.bind(_this);\n _this.resize = _this.resize.bind(_this);\n return _this;\n }\n\n _createClass(EchartPart, [{\n key: \"resize\",\n value: function resize() {\n for (var i = 0; i < this.state.chartRef.length; i++) {\n var ref = this.refs[this.state.chartRef[i]];\n if (!ref.className) {\n var chart = _echarts2.default.init(ref);\n chart.resize({ silent: true });\n }\n }\n }\n }, {\n key: \"changeList\",\n value: function changeList(selectedItemList) {\n var _this2 = this;\n\n this.state.itemList.map(function (item) {\n _this2.state.checkStatus[item.value] = false;\n });\n selectedItemList.map(function (item) {\n _this2.state.checkStatus[item.value] = true;\n });\n var state = this.state;\n state.selectedItemList = selectedItemList;\n this.setState(state);\n }\n }, {\n key: \"changeState\",\n value: function 
changeState(name) {\n var state = this.state;\n state.checkStatus[name] = !state.checkStatus[name];\n this.setState(state);\n }\n\n //echarts\n\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.setXAxis();\n _OverviewActions2.default.getData();\n _OverviewActions2.default.getMemoryData();\n _OverviewStore2.default.listen(this._onChange);\n this.lineDatas();\n\n var win = window;\n if (win.addEventListener) {\n win.addEventListener('resize', this.resize, false);\n } else if (win.attachEvent) {\n win.attachEvent('onresize', this.resize);\n } else {\n win.onresize = this.resize;\n }\n $(window).on('resize', this.resize);\n }\n }, {\n key: \"componentWillUnmount\",\n value: function componentWillUnmount() {\n _OverviewStore2.default.unlisten(this._onChange);\n clearInterval(this.state.timer);\n }\n\n //obtained data per sec\n\n }, {\n key: \"lineDatas\",\n value: function lineDatas() {\n this.state.timer = setInterval(function () {\n _OverviewActions2.default.getData();\n _OverviewActions2.default.getMemoryData();\n }, 1000);\n }\n //refresh line\n\n }, {\n key: \"_onChange\",\n value: function _onChange(data) {\n if (data.requestNum % 2 === 0) {\n if (!this.state.memoryInit && data.memoryData) {\n // let cpuChart=echarts.init(this.refs.cpuLoad);\n // let option=cpuChart.getOption();\n // let memoryInitData=[];\n // let cpuSeries={};\n // let index = 0;\n // Object.keys(data.memoryData).map(key=>{\n // let op = Object.assign({}, option.series[index]);\n // index++;\n // op.name = key.slice(0, key.indexOf(\" \"));\n // let currentCpuData = [...this.delete(this.state.chartCpu), [new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]];\n // op.data = this.state.step === 10 ? currentCpuData.slice(1200) : this.state.step === 20 ? 
currentCpuData.slice(600) : currentCpuData;\n // op.areaStyle = {\n // shadowBlur: 10,\n // opacity: 0.1\n // };\n // op.type = 'line';\n // op.showSymbol = false;\n // memoryInitData.push(op);\n // cpuSeries[key]= currentCpuData;\n // })\n // option.series=memoryInitData;\n // option.yAxis = {max: 100, min: 0, type: \"value\"};\n // cpuChart.setOption(option);\n\n // let heatMapChart = echarts.init(this.refs.heatMapChart, \"royal\");\n // heatMapChart.setOption({\n // animation: false,\n // title: {\n // text: 'Cluster CPU Usage',\n // left: 'center',\n // textStyle: {\n // color: \"#767676\",\n // fontSize: 16\n // }\n // },\n // tooltip:{\n // trigger:'item',\n // formatter: function (params, t, cb) {\n // return params.name + \" : \" + params.value+\"%\";\n // }\n // },\n // series: [{\n // type: 'treemap',\n // data: this.state.heatMapChart\n // }]\n // })\n // let heatMapMemoryChart = echarts.init(this.refs.heatMapMemoryChart, \"royal\");\n // heatMapMemoryChart.setOption({\n // animation: false,\n // title: {\n // text: 'Cluster Free Memory ',\n // left: 'center',\n // textStyle: {\n // color: \"#767676\",\n // fontSize: 16\n // }\n // },\n // tooltip:{\n // trigger:'item',\n // formatter: function (params, t, cb) {\n // return params.name + \" : \" + formatDataSizeBytes(params.value);\n // }\n // },\n // series: [{\n // type: 'treemap',\n // data: this.state.heatMapMemoryChart\n // }]\n // })\n\n this.setState({\n memoryInit: true\n });\n }\n // else{\n // let dataCpu=this.state.chartCpu;\n // let mychart1=echarts.init(this.refs.cpuLoad);\n // let option=mychart1.getOption();\n // let memoryInitData=option.series;\n // Object.keys(data.memoryData).map(key=>{\n // let dataCpuElement = dataCpu[key];\n // if (_.isUndefined(dataCpuElement)) {\n // let op = Object.assign({}, option.series[index]);\n // op.name = key.slice(0, key.indexOf(\" \"));\n // op.areaStyle = {\n // shadowBlur: 10,\n // opacity: 0.1\n // };\n // op.type = 'line';\n // dataCpu[key] = 
[...this.delete(dataCpuElement), [new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]];\n // op.data = dataCpu[key];\n // memoryInitData.push(op);\n // }\n // else {\n // dataCpu[key] = [...this.delete(dataCpuElement), [new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]];\n // }\n // for(let i=0,len=memoryInitData.length;i {\n // let id = data.memoryData[key].id;\n // let name = key;\n // let index = _.findIndex(heatMapData, {id: id});\n // let newDataPoint = Number((data.memoryData[key].systemCpuLoad * 100).toFixed(2));\n // if (index == -1) {\n // let newData = {};\n // newData.id = id;\n // newData.name = name;\n // newData.value = newDataPoint\n // newData.dataset = [newDataPoint];\n // newData.children = [];\n // heatMapData.push(newData);\n // }\n // else {\n // let entry = heatMapData[index];\n // let dataset = entry.dataset;\n // if (dataset.length >= 600) {\n // dataset = dataset.splice(600 - 1, dataset.length - 600 - 1);\n // }\n // dataset = [...dataset, newDataPoint]\n // entry.dataset = dataset;\n // let sum = 0;\n // for (let i = 0; i < dataset.length; i++) {\n // sum += dataset[i];\n // }\n // entry.value = Number((sum / dataset.length).toFixed(2));\n // }\n // });\n //\n // let heatMapDataSort = bubbleSort(heatMapData);\n // this.state.heatMapChart = heatMapDataSort.slice(0,10);\n // let heatMapChart = echarts.init(this.refs.heatMapChart, \"royal\");\n // let heatMapChartOption = heatMapChart.getOption();\n // heatMapChartOption.series = [{\n // type: \"treemap\",\n // data: heatMapDataSort.slice(0,10),\n // breadcrumb: {\n // show: false\n // }\n // }];\n // heatMapChart.setOption(heatMapChartOption);\n //\n // //heatMap memory data\n // let heatMapMemoryData = this.state.heatMapMemoryChart;\n // Object.keys(data.memoryData).map(key => {\n // let id = data.memoryData[key].id;\n // let name = key;\n // let index = _.findIndex(heatMapMemoryData, {id: id});\n 
// let newDataPoint = 0;\n // if (typeof (data.memoryData[key].pools.general) != \"undefined\"){\n // newDataPoint += data.memoryData[key].pools.general.freeBytes;\n // if (typeof (data.memoryData[key].pools.reserved) != \"undefined\"){\n // newDataPoint += data.memoryData[key].pools.reserved.freeBytes;\n // }\n // }\n // newDataPoint = Number(newDataPoint);\n // if (index == -1) {\n // let newData = {};\n // newData.id = id;\n // newData.name = name;\n // newData.value = newDataPoint\n // newData.dataset = [newDataPoint];\n // newData.children = [];\n // heatMapMemoryData.push(newData);\n // }\n // else {\n // let entry = heatMapMemoryData[index];\n // let dataset = entry.dataset;\n // if (dataset.length >= 600) {\n // dataset = dataset.splice(600 - 1, dataset.length - 600 - 1);\n // }\n // dataset = [...dataset, newDataPoint]\n // entry.dataset = dataset;\n // let sum = 0;\n // for (let i = 0; i < dataset.length; i++) {\n // sum += dataset[i];\n // }\n // entry.value = Number((sum / dataset.length).toFixed(2));\n // }\n // });\n // let heatMapMemoryDataSort = bubbleSort(heatMapMemoryData);\n // this.state.heatMapMemoryChart = heatMapMemoryDataSort.slice(0,10);\n // let heatMapMemoryChart = echarts.init(this.refs.heatMapMemoryChart, \"royal\");\n // let heatMapMemoryChartOption = heatMapMemoryChart.getOption();\n // heatMapMemoryChartOption.series = [{\n // type: \"treemap\",\n // data: heatMapMemoryDataSort.slice(0,10),\n // breadcrumb: {\n // show: false\n // },\n // }];\n // heatMapMemoryChart.setOption(heatMapMemoryChartOption);\n\n var now = Date.now();\n var secondsSinceLastRefresh = this.state.lastRefresh ? (now - this.state.lastRefresh) / 1000.0 : 1;\n secondsSinceLastRefresh = secondsSinceLastRefresh < 1 ? 1 : secondsSinceLastRefresh;\n var lastWorker = this.state.lastWorker ? 
(data.lineData.totalCpuTimeSecs - this.state.lastWorker) / data.lineData.activeWorkers / secondsSinceLastRefresh : 0;\n this.setState({\n chartCpu: [].concat(_toConsumableArray(this.delete(this.state.chartCpu)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), (data.lineData.systemCpuLoad * 100).toFixed(4)]]),\n chart1: [].concat(_toConsumableArray(this.delete(this.state.chart1)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.reservedMemory]]),\n chart2: [].concat(_toConsumableArray(this.delete(this.state.chart2)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.runningQueries]]),\n chart3: [].concat(_toConsumableArray(this.delete(this.state.chart3)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.queuedQueries]]),\n chart4: [].concat(_toConsumableArray(this.delete(this.state.chart4)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.blockedQueries]]),\n chart5: [].concat(_toConsumableArray(this.delete(this.state.chart5)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.activeWorkers]]),\n chart6: [].concat(_toConsumableArray(this.delete(this.state.chart6)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.runningDrivers]]),\n chart7: [].concat(_toConsumableArray(this.delete(this.state.chart7)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), lastWorker]]),\n chart8: [].concat(_toConsumableArray(this.delete(this.state.chart8)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.totalInputBytes]]),\n lastWorker: data.lineData.totalCpuTimeSecs,\n // heatMapChart: this.state.heatMapChart,\n // heatMapMemoryChart: this.state.heatMapMemoryChart,\n lastRefresh: now\n });\n if (!this.refs.cpuLoad.className) {\n var mychart = _echarts2.default.init(this.refs.cpuLoad);\n var option = mychart.getOption();\n option.series[0].data = this.state.step === 10 ? this.state.chartCpu.slice(1200) : this.state.step === 20 ? 
this.state.chartCpu.slice(600) : this.state.chartCpu;\n option.series[0].areaStyle = {\n color: \"#41BB04\",\n shadowBlur: 10,\n opacity: 0.1\n };\n option.series[0].lineStyle = { color: \"#137113\" };\n option.series[0].itemStyle = { color: \"#137113\" };\n option.yAxis = { max: 100, min: 0, type: \"value\" };\n mychart.setOption(option);\n }\n for (var i = 0; i < this.state.chartName.length; i++) {\n if (!this.refs[this.state.chartRef[i]].className) {\n var _mychart = _echarts2.default.init(this.refs[this.state.chartRef[i]]);\n var _option = _mychart.getOption();\n _option.series[0].data = this.state.step === 10 ? this.state['chart' + parseInt(i + 1)].slice(1200) : this.state.step === 20 ? this.state['chart' + parseInt(i + 1)].slice(600) : this.state['chart' + parseInt(i + 1)];\n _option.series[0].areaStyle = {\n color: \"#c3c683\",\n shadowBlur: 10,\n opacity: 0.1\n };\n _option.series[0].lineStyle = { color: \"#b6a019\" };\n _option.series[0].itemStyle = { color: \"#b6a019\" };\n _mychart.setOption(_option);\n }\n }\n }\n }\n\n // delete first data\n\n }, {\n key: \"delete\",\n value: function _delete(arr) {\n if (_lodash2.default.isUndefined(arr)) {\n return [];\n }\n arr.splice(0, 1);\n return arr;\n }\n //according to step to set XAxis data\n\n }, {\n key: \"setXAxis\",\n value: function setXAxis() {\n var arr = [];\n for (var i = 0, len = 30 * 60; i < len; i++) {\n arr[i] = [new Date(new Date().getTime() - 1000 * i).format('yyyy-MM-dd hh:mm:ss'), 0];\n }\n arr = arr.reverse();\n this.setState({\n chartCpu: [].concat(_toConsumableArray(arr)),\n chart1: [].concat(_toConsumableArray(arr)),\n chart2: [].concat(_toConsumableArray(arr)),\n chart3: [].concat(_toConsumableArray(arr)),\n chart4: [].concat(_toConsumableArray(arr)),\n chart5: [].concat(_toConsumableArray(arr)),\n chart6: [].concat(_toConsumableArray(arr)),\n chart7: [].concat(_toConsumableArray(arr)),\n chart8: [].concat(_toConsumableArray(arr))\n });\n var mychart1 = 
_echarts2.default.init(this.refs.cpuLoad);\n mychart1.setOption({\n animation: false,\n title: { text: 'Average Cluster CPU Usage',\n left: 'center',\n textStyle: {\n color: \"#767676\",\n fontSize: 16\n }\n },\n tooltip: {\n trigger: 'axis'\n },\n xAxis: {\n type: 'time',\n name: 'time',\n interval: 60 * 1000 * this.state.step / 10,\n boundaryGap: false,\n axisLabel: {\n formatter: function formatter(value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n var date = new Date(value).format(\"yyyy-MM-dd hh:mm:ss\");\n return date.slice(11, 16);\n }\n }\n },\n yAxis: {\n name: 'usage(%)',\n axisTick: {\n show: false\n },\n axisLabel: {\n formatter: function formatter(value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n return value;\n }\n }\n },\n series: [{\n type: 'line',\n symbol: 'none',\n data: []\n }]\n });\n for (var _i = 0; _i < this.state.chartName.length; _i++) {\n if (!this.refs[this.state.chartRef[_i]].className) {\n var mychart = _echarts2.default.init(this.refs[this.state.chartRef[_i]]);\n mychart.setOption({\n animation: false,\n title: {\n text: this.state.chartName[_i],\n left: 'center',\n textStyle: {\n color: \"#767676\",\n fontSize: 16\n }\n },\n tooltip: {\n trigger: 'axis'\n },\n xAxis: {\n type: 'time',\n name: 'time',\n interval: 60 * 1000 * this.state.step / 10,\n boundaryGap: false,\n axisLabel: {\n formatter: function formatter(value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n var date = new Date(value).format(\"yyyy-MM-dd hh:mm:ss\");\n return date.slice(11, 16);\n }\n }\n },\n yAxis: {\n name: this.state.unitArr[_i],\n axisTick: {\n show: false\n },\n axisLabel: {\n formatter: function (name, value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n if (name === 'quantity') {\n return (0, _utils.formatCount)(value);\n } else if (name === 'bytes') {\n return (0, _utils.formatDataSizeBytes)(value);\n } else {\n return value;\n }\n }.bind(null, this.state.unitArr[_i])\n }\n },\n series: [{\n type: 'line',\n symbol: 
'none',\n data: this.state.step === 10 ? this.state['chart' + parseInt(_i + 1)].slice(1200) : this.state.step === 20 ? this.state['chart' + parseInt(_i + 1)].slice(600) : this.state['chart' + parseInt(_i + 1)]\n }]\n });\n }\n }\n }\n }, {\n key: \"selected\",\n value: function selected(e) {\n clearInterval(this.state.timer);\n e.preventDefault();\n var val = e.target.selectedIndex === 0 ? 10 : e.target.selectedIndex === 1 ? 20 : 30;\n var state = this.state;\n state.step = val;\n this.setState(state);\n for (var i = 0; i < this.state.chartName.length; i++) {\n if (!this.refs[this.state.chartRef[i]].className) {\n var mychart = _echarts2.default.init(this.refs[this.state.chartRef[i]]);\n var _option2 = mychart.getOption();\n _option2.xAxis[0].interval = 60 * 1000 * this.state.step / 10;\n // option.series[0].data=[];\n mychart.setOption(_option2);\n }\n }\n var mychart1 = _echarts2.default.init(this.refs.cpuLoad);\n var option = mychart1.getOption();\n option.xAxis[0].interval = 60 * 1000 * this.state.step / 10;\n mychart1.setOption(option);\n _OverviewActions2.default.getData();\n this.lineDatas();\n }\n }, {\n key: \"render\",\n value: function render() {\n var _this3 = this;\n\n var style = { height: \"30vh\", width: \"calc(40vw - 80px)\", left: \"center\", top: \"center\" };\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"selectItemContainer\" },\n _react2.default.createElement(\n \"div\",\n { className: \"selectChart multiSelect\" },\n _react2.default.createElement(_reactSimpleMultiSelect2.default, {\n title: \"Select Chart\",\n itemList: this.state.itemList,\n selectedItemList: this.state.selectedItemList,\n changeList: this.changeList,\n isObjectArray: true\n })\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"select-part\" },\n _react2.default.createElement(\n \"select\",\n { onChange: this.selected.bind(this), value: this.state.step },\n 
_react2.default.createElement(\n \"option\",\n { value: \"10\" },\n \"Last 10 minutes\"\n ),\n _react2.default.createElement(\n \"option\",\n { value: \"20\" },\n \"Last 20 minutes\"\n ),\n _react2.default.createElement(\n \"option\",\n { value: \"30\" },\n \"Last 30 minutes\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"overviewGraphContainerParent\" },\n _react2.default.createElement(\n \"div\",\n { className: \"overviewGraphContainer\" },\n _react2.default.createElement(\n \"div\",\n { className: this.state.checkStatus[\"cpuLoad\"] ? 'overviewChart' : 'display-none' },\n _react2.default.createElement(\"div\", { ref: \"cpuLoad\", style: style })\n ),\n Object.keys(this.state.checkStatus).map(function (key, index) {\n if (key == 'cpuLoad') {\n return null;\n }\n return _react2.default.createElement(\n \"div\",\n { className: _this3.state.checkStatus[key] ? 'overviewChart' : 'display-none', key: index },\n _react2.default.createElement(\"div\", { ref: key, style: style })\n );\n })\n )\n )\n );\n }\n }]);\n\n return EchartPart;\n}(_react2.default.Component);\n\nexports.default = EchartPart;\n\n//# sourceURL=webpack:///./overview/EchartPart.jsx?"); +eval("\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\n\nvar _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if (\"value\" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();\n\nvar _react = __webpack_require__(/*! 
react */ \"./node_modules/react/index.js\");\n\nvar _react2 = _interopRequireDefault(_react);\n\nvar _echarts = __webpack_require__(/*! echarts/lib/echarts */ \"./node_modules/echarts/lib/echarts.js\");\n\nvar _echarts2 = _interopRequireDefault(_echarts);\n\n__webpack_require__(/*! echarts/lib/chart/line */ \"./node_modules/echarts/lib/chart/line.js\");\n\n__webpack_require__(/*! echarts/lib/chart/treemap */ \"./node_modules/echarts/lib/chart/treemap.js\");\n\n__webpack_require__(/*! echarts/theme/royal */ \"./node_modules/echarts/theme/royal.js\");\n\n__webpack_require__(/*! echarts/lib/component/tooltip */ \"./node_modules/echarts/lib/component/tooltip.js\");\n\n__webpack_require__(/*! echarts/lib/component/title */ \"./node_modules/echarts/lib/component/title.js\");\n\nvar _OverviewActions = __webpack_require__(/*! ./OverviewActions */ \"./overview/OverviewActions.js\");\n\nvar _OverviewActions2 = _interopRequireDefault(_OverviewActions);\n\nvar _OverviewStore = __webpack_require__(/*! ./OverviewStore */ \"./overview/OverviewStore.js\");\n\nvar _OverviewStore2 = _interopRequireDefault(_OverviewStore);\n\nvar _reactSimpleMultiSelect = __webpack_require__(/*! react-simple-multi-select */ \"./node_modules/react-simple-multi-select/build/components/MultiSelect.js\");\n\nvar _reactSimpleMultiSelect2 = _interopRequireDefault(_reactSimpleMultiSelect);\n\nvar _utils = __webpack_require__(/*! ../utils */ \"./utils.js\");\n\nvar _lodash = __webpack_require__(/*! lodash */ \"./node_modules/lodash/lodash.js\");\n\nvar _lodash2 = _interopRequireDefault(_lodash);\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }\n\nfunction _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError(\"Cannot call a class as a function\"); } }\n\nfunction _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\"); } return call && (typeof call === \"object\" || typeof call === \"function\") ? call : self; }\n\nfunction _inherits(subClass, superClass) { if (typeof superClass !== \"function\" && superClass !== null) { throw new TypeError(\"Super expression must either be null or a function, not \" + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /*\n * Copyright (C) 2018-2021. Huawei Technologies Co., Ltd. 
All rights reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n\nvar EchartPart = function (_React$Component) {\n _inherits(EchartPart, _React$Component);\n\n function EchartPart(props) {\n _classCallCheck(this, EchartPart);\n\n var _this = _possibleConstructorReturn(this, (EchartPart.__proto__ || Object.getPrototypeOf(EchartPart)).call(this, props));\n\n _this.state = {\n checkStatus: {\n checkOne: true,\n checkTwo: true,\n checkThree: true,\n checkFour: true,\n checkFive: true,\n checkSix: true,\n checkSeven: true,\n checkEight: true,\n cpuLoad: true\n },\n itemList: [{ key: \"Avg Cluster CPU Usage\", value: \"cpuLoad\" }, { key: \"Used Query Memory\", value: \"checkOne\" }, { key: \"Running Queries\", value: \"checkTwo\" }, { key: \"Queued Queries\", value: \"checkThree\" }, { key: \"Blocked Queries\", value: \"checkFour\" }, { key: \"Active Workers\", value: \"checkFive\" }, { key: \"Avg Running Tasks\", value: \"checkSix\" }, { key: \"Avg CPU cycles per worker\", value: \"checkSeven\" }, { key: \"Input Total Bytes\", value: \"checkEight\" }],\n selectedItemList: [{ key: \"Avg Cluster CPU Usage\", value: \"cpuLoad\" }, { key: \"Used Query Memory\", value: \"checkOne\" }, { key: \"Running Queries\", value: \"checkTwo\" }, { key: \"Queued Queries\", value: \"checkThree\" }, { key: \"Blocked Queries\", value: \"checkFour\" }, { key: \"Active Workers\", value: \"checkFive\" }, { key: \"Avg Running Tasks\", value: \"checkSix\" }, { key: \"Avg 
CPU cycles per worker\", value: \"checkSeven\" }, { key: \"total Input Bytes\", value: \"checkEight\" }],\n chartName: ['Used Query Memory', 'Running Queries', 'Queued Queries', 'Blocked Queries', 'Active Workers', 'Avg Running Tasks', 'Avg CPU cycles per worker', 'total Input Bytes'],\n step: 10,\n timer: null,\n chartCpu: [],\n chart1: [],\n chart2: [],\n chart3: [],\n chart4: [],\n chart5: [],\n chart6: [],\n chart7: [],\n chart8: [],\n chartRef: null,\n lastRow: null,\n lastByte: null,\n lastWorker: null,\n memoryInit: false,\n unitArr: ['bytes', 'quantity', 'quantity', 'quantity', 'quantity', 'quantity', 'quantity', 'bytes'],\n lastRefresh: null\n };\n _this.state.chartRef = Object.keys(_this.state.checkStatus), _this._onChange = _this._onChange.bind(_this);\n _this.changeList = _this.changeList.bind(_this);\n _this.resize = _this.resize.bind(_this);\n return _this;\n }\n\n _createClass(EchartPart, [{\n key: \"resize\",\n value: function resize() {\n for (var i = 0; i < this.state.chartRef.length; i++) {\n var ref = this.refs[this.state.chartRef[i]];\n if (!ref.className) {\n var chart = _echarts2.default.init(ref);\n chart.resize({ silent: true });\n }\n }\n }\n }, {\n key: \"changeList\",\n value: function changeList(selectedItemList) {\n var _this2 = this;\n\n this.state.itemList.map(function (item) {\n _this2.state.checkStatus[item.value] = false;\n });\n selectedItemList.map(function (item) {\n _this2.state.checkStatus[item.value] = true;\n });\n var state = this.state;\n state.selectedItemList = selectedItemList;\n this.setState(state);\n }\n }, {\n key: \"changeState\",\n value: function changeState(name) {\n var state = this.state;\n state.checkStatus[name] = !state.checkStatus[name];\n this.setState(state);\n }\n\n //echarts\n\n }, {\n key: \"componentDidMount\",\n value: function componentDidMount() {\n this.setXAxis();\n _OverviewActions2.default.getData();\n _OverviewActions2.default.getMemoryData();\n 
_OverviewStore2.default.listen(this._onChange);\n this.lineDatas();\n\n var win = window;\n if (win.addEventListener) {\n win.addEventListener('resize', this.resize, false);\n } else if (win.attachEvent) {\n win.attachEvent('onresize', this.resize);\n } else {\n win.onresize = this.resize;\n }\n $(window).on('resize', this.resize);\n }\n }, {\n key: \"componentWillUnmount\",\n value: function componentWillUnmount() {\n _OverviewStore2.default.unlisten(this._onChange);\n clearInterval(this.state.timer);\n }\n\n //obtained data per sec\n\n }, {\n key: \"lineDatas\",\n value: function lineDatas() {\n this.state.timer = setInterval(function () {\n _OverviewActions2.default.getData();\n _OverviewActions2.default.getMemoryData();\n }, 1000);\n }\n //refresh line\n\n }, {\n key: \"_onChange\",\n value: function _onChange(data) {\n if (data.requestNum % 2 === 0) {\n if (!this.state.memoryInit && data.memoryData) {\n this.setState({\n memoryInit: true\n });\n }\n var now = Date.now();\n var secondsSinceLastRefresh = this.state.lastRefresh ? (now - this.state.lastRefresh) / 1000.0 : 1;\n secondsSinceLastRefresh = secondsSinceLastRefresh < 1 ? 1 : secondsSinceLastRefresh;\n var lastWorker = this.state.lastWorker ? 
(data.lineData.totalCpuTimeSecs - this.state.lastWorker) / data.lineData.activeWorkers / secondsSinceLastRefresh : 0;\n this.setState({\n chartCpu: [].concat(_toConsumableArray(this.delete(this.state.chartCpu)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), (data.lineData.systemCpuLoad * 100).toFixed(4)]]),\n chart1: [].concat(_toConsumableArray(this.delete(this.state.chart1)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.reservedMemory]]),\n chart2: [].concat(_toConsumableArray(this.delete(this.state.chart2)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.runningQueries]]),\n chart3: [].concat(_toConsumableArray(this.delete(this.state.chart3)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.queuedQueries]]),\n chart4: [].concat(_toConsumableArray(this.delete(this.state.chart4)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.blockedQueries]]),\n chart5: [].concat(_toConsumableArray(this.delete(this.state.chart5)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.activeWorkers]]),\n chart6: [].concat(_toConsumableArray(this.delete(this.state.chart6)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.runningDrivers]]),\n chart7: [].concat(_toConsumableArray(this.delete(this.state.chart7)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), lastWorker]]),\n chart8: [].concat(_toConsumableArray(this.delete(this.state.chart8)), [[new Date().format('yyyy-MM-dd hh:mm:ss'), data.lineData.totalInputBytes]]),\n lastWorker: data.lineData.totalCpuTimeSecs,\n lastRefresh: now\n });\n if (!this.refs.cpuLoad.className) {\n var mychart = _echarts2.default.init(this.refs.cpuLoad);\n var option = mychart.getOption();\n option.series[0].data = this.state.step === 10 ? this.state.chartCpu.slice(1200) : this.state.step === 20 ? 
this.state.chartCpu.slice(600) : this.state.chartCpu;\n option.series[0].areaStyle = {\n color: \"#41BB04\",\n shadowBlur: 10,\n opacity: 0.1\n };\n option.series[0].lineStyle = { color: \"#137113\" };\n option.series[0].itemStyle = { color: \"#137113\" };\n option.yAxis = { max: 100, min: 0, type: \"value\" };\n mychart.setOption(option);\n }\n for (var i = 0; i < this.state.chartName.length; i++) {\n if (!this.refs[this.state.chartRef[i]].className) {\n var _mychart = _echarts2.default.init(this.refs[this.state.chartRef[i]]);\n var _option = _mychart.getOption();\n _option.series[0].data = this.state.step === 10 ? this.state['chart' + parseInt(i + 1)].slice(1200) : this.state.step === 20 ? this.state['chart' + parseInt(i + 1)].slice(600) : this.state['chart' + parseInt(i + 1)];\n _option.series[0].areaStyle = {\n color: \"#c3c683\",\n shadowBlur: 10,\n opacity: 0.1\n };\n _option.series[0].lineStyle = { color: \"#b6a019\" };\n _option.series[0].itemStyle = { color: \"#b6a019\" };\n _mychart.setOption(_option);\n }\n }\n }\n }\n\n // delete first data\n\n }, {\n key: \"delete\",\n value: function _delete(arr) {\n if (_lodash2.default.isUndefined(arr)) {\n return [];\n }\n arr.splice(0, 1);\n return arr;\n }\n //according to step to set XAxis data\n\n }, {\n key: \"setXAxis\",\n value: function setXAxis() {\n var arr = [];\n for (var i = 0, len = 30 * 60; i < len; i++) {\n arr[i] = [new Date(new Date().getTime() - 1000 * i).format('yyyy-MM-dd hh:mm:ss'), 0];\n }\n arr = arr.reverse();\n this.setState({\n chartCpu: [].concat(_toConsumableArray(arr)),\n chart1: [].concat(_toConsumableArray(arr)),\n chart2: [].concat(_toConsumableArray(arr)),\n chart3: [].concat(_toConsumableArray(arr)),\n chart4: [].concat(_toConsumableArray(arr)),\n chart5: [].concat(_toConsumableArray(arr)),\n chart6: [].concat(_toConsumableArray(arr)),\n chart7: [].concat(_toConsumableArray(arr)),\n chart8: [].concat(_toConsumableArray(arr))\n });\n var mychart1 = 
_echarts2.default.init(this.refs.cpuLoad);\n mychart1.setOption({\n animation: false,\n title: { text: 'Average Cluster CPU Usage',\n left: 'center',\n textStyle: {\n color: \"#767676\",\n fontSize: 16\n }\n },\n tooltip: {\n trigger: 'axis'\n },\n xAxis: {\n type: 'time',\n name: 'time',\n interval: 60 * 1000 * this.state.step / 10,\n boundaryGap: false,\n axisLabel: {\n formatter: function formatter(value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n var date = new Date(value).format(\"yyyy-MM-dd hh:mm:ss\");\n return date.slice(11, 16);\n }\n }\n },\n yAxis: {\n name: 'usage(%)',\n axisTick: {\n show: false\n },\n axisLabel: {\n formatter: function formatter(value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n return value;\n }\n }\n },\n series: [{\n type: 'line',\n symbol: 'none',\n data: []\n }]\n });\n for (var _i = 0; _i < this.state.chartName.length; _i++) {\n if (!this.refs[this.state.chartRef[_i]].className) {\n var mychart = _echarts2.default.init(this.refs[this.state.chartRef[_i]]);\n mychart.setOption({\n animation: false,\n title: {\n text: this.state.chartName[_i],\n left: 'center',\n textStyle: {\n color: \"#767676\",\n fontSize: 16\n }\n },\n tooltip: {\n trigger: 'axis'\n },\n xAxis: {\n type: 'time',\n name: 'time',\n interval: 60 * 1000 * this.state.step / 10,\n boundaryGap: false,\n axisLabel: {\n formatter: function formatter(value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n var date = new Date(value).format(\"yyyy-MM-dd hh:mm:ss\");\n return date.slice(11, 16);\n }\n }\n },\n yAxis: {\n name: this.state.unitArr[_i],\n axisTick: {\n show: false\n },\n axisLabel: {\n formatter: function (name, value, index) {\n if (index % 2 == 1) {\n return \"\";\n }\n if (name === 'quantity') {\n return (0, _utils.formatCount)(value);\n } else if (name === 'bytes') {\n return (0, _utils.formatDataSizeBytes)(value);\n } else {\n return value;\n }\n }.bind(null, this.state.unitArr[_i])\n }\n },\n series: [{\n type: 'line',\n symbol: 
'none',\n data: this.state.step === 10 ? this.state['chart' + parseInt(_i + 1)].slice(1200) : this.state.step === 20 ? this.state['chart' + parseInt(_i + 1)].slice(600) : this.state['chart' + parseInt(_i + 1)]\n }]\n });\n }\n }\n }\n }, {\n key: \"selected\",\n value: function selected(e) {\n clearInterval(this.state.timer);\n e.preventDefault();\n var val = e.target.selectedIndex === 0 ? 10 : e.target.selectedIndex === 1 ? 20 : 30;\n var state = this.state;\n state.step = val;\n this.setState(state);\n for (var i = 0; i < this.state.chartName.length; i++) {\n if (!this.refs[this.state.chartRef[i]].className) {\n var mychart = _echarts2.default.init(this.refs[this.state.chartRef[i]]);\n var _option2 = mychart.getOption();\n _option2.xAxis[0].interval = 60 * 1000 * this.state.step / 10;\n mychart.setOption(_option2);\n }\n }\n var mychart1 = _echarts2.default.init(this.refs.cpuLoad);\n var option = mychart1.getOption();\n option.xAxis[0].interval = 60 * 1000 * this.state.step / 10;\n mychart1.setOption(option);\n _OverviewActions2.default.getData();\n this.lineDatas();\n }\n }, {\n key: \"render\",\n value: function render() {\n var _this3 = this;\n\n var style = { height: \"30vh\", width: \"calc(40vw - 80px)\", left: \"center\", top: \"center\" };\n return _react2.default.createElement(\n \"div\",\n null,\n _react2.default.createElement(\n \"div\",\n { className: \"selectItemContainer\" },\n _react2.default.createElement(\n \"div\",\n { className: \"selectChart multiSelect\" },\n _react2.default.createElement(_reactSimpleMultiSelect2.default, {\n title: \"Select Chart\",\n itemList: this.state.itemList,\n selectedItemList: this.state.selectedItemList,\n changeList: this.changeList,\n isObjectArray: true\n })\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"select-part\" },\n _react2.default.createElement(\n \"select\",\n { onChange: this.selected.bind(this), value: this.state.step },\n _react2.default.createElement(\n \"option\",\n { value: \"10\" 
},\n \"Last 10 minutes\"\n ),\n _react2.default.createElement(\n \"option\",\n { value: \"20\" },\n \"Last 20 minutes\"\n ),\n _react2.default.createElement(\n \"option\",\n { value: \"30\" },\n \"Last 30 minutes\"\n )\n )\n )\n ),\n _react2.default.createElement(\n \"div\",\n { className: \"overviewGraphContainerParent\" },\n _react2.default.createElement(\n \"div\",\n { className: \"overviewGraphContainer\" },\n _react2.default.createElement(\n \"div\",\n { className: this.state.checkStatus[\"cpuLoad\"] ? 'overviewChart' : 'display-none' },\n _react2.default.createElement(\"div\", { ref: \"cpuLoad\", style: style })\n ),\n Object.keys(this.state.checkStatus).map(function (key, index) {\n if (key == 'cpuLoad') {\n return null;\n }\n return _react2.default.createElement(\n \"div\",\n { className: _this3.state.checkStatus[key] ? 'overviewChart' : 'display-none', key: index },\n _react2.default.createElement(\"div\", { ref: key, style: style })\n );\n })\n )\n )\n );\n }\n }]);\n\n return EchartPart;\n}(_react2.default.Component);\n\nexports.default = EchartPart;\n\n//# sourceURL=webpack:///./overview/EchartPart.jsx?"); /***/ }), diff --git a/presto-main/src/main/resources/webapp/src/overview/EchartPart.jsx b/presto-main/src/main/resources/webapp/src/overview/EchartPart.jsx index 4a5c953f5..948f6c7c2 100644 --- a/presto-main/src/main/resources/webapp/src/overview/EchartPart.jsx +++ b/presto-main/src/main/resources/webapp/src/overview/EchartPart.jsx @@ -38,13 +38,9 @@ class EchartPart extends React.Component{ checkSix:true, checkSeven:true, checkEight:true, - // heatMapChart: true, cpuLoad: true - // heatMapMemoryChart: true }, itemList: [ - // {key: "Cluster CPU Usage", value: "heatMapChart"}, - // {key: "Cluster Free Memory", value: "heatMapMemoryChart"}, {key: "Avg Cluster CPU Usage", value: "cpuLoad"}, {key: "Used Query Memory", value: "checkOne"}, {key: "Running Queries", value: "checkTwo"}, @@ -56,8 +52,6 @@ class EchartPart extends React.Component{ {key: "Input 
Total Bytes", value: "checkEight"} ], selectedItemList: [ - // {key: "Cluster CPU Usage", value: "heatMapChart"}, - // {key: "Cluster Free Memory", value: "heatMapMemoryChart"}, {key: "Avg Cluster CPU Usage", value: "cpuLoad"}, {key: "Used Query Memory", value: "checkOne"}, {key: "Running Queries", value: "checkTwo"}, @@ -72,8 +66,6 @@ class EchartPart extends React.Component{ step:10, timer:null, chartCpu:[], - // heatMapChart: [], - // heatMapMemoryChart: [], chart1:[], chart2:[], chart3:[], @@ -155,209 +147,10 @@ class EchartPart extends React.Component{ _onChange(data){ if(data.requestNum%2===0){ if(!this.state.memoryInit && data.memoryData){ - // let cpuChart=echarts.init(this.refs.cpuLoad); - // let option=cpuChart.getOption(); - // let memoryInitData=[]; - // let cpuSeries={}; - // let index = 0; - // Object.keys(data.memoryData).map(key=>{ - // let op = Object.assign({}, option.series[index]); - // index++; - // op.name = key.slice(0, key.indexOf(" ")); - // let currentCpuData = [...this.delete(this.state.chartCpu), [new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]]; - // op.data = this.state.step === 10 ? currentCpuData.slice(1200) : this.state.step === 20 ? 
currentCpuData.slice(600) : currentCpuData; - // op.areaStyle = { - // shadowBlur: 10, - // opacity: 0.1 - // }; - // op.type = 'line'; - // op.showSymbol = false; - // memoryInitData.push(op); - // cpuSeries[key]= currentCpuData; - // }) - // option.series=memoryInitData; - // option.yAxis = {max: 100, min: 0, type: "value"}; - // cpuChart.setOption(option); - - // let heatMapChart = echarts.init(this.refs.heatMapChart, "royal"); - // heatMapChart.setOption({ - // animation: false, - // title: { - // text: 'Cluster CPU Usage', - // left: 'center', - // textStyle: { - // color: "#767676", - // fontSize: 16 - // } - // }, - // tooltip:{ - // trigger:'item', - // formatter: function (params, t, cb) { - // return params.name + " : " + params.value+"%"; - // } - // }, - // series: [{ - // type: 'treemap', - // data: this.state.heatMapChart - // }] - // }) - // let heatMapMemoryChart = echarts.init(this.refs.heatMapMemoryChart, "royal"); - // heatMapMemoryChart.setOption({ - // animation: false, - // title: { - // text: 'Cluster Free Memory ', - // left: 'center', - // textStyle: { - // color: "#767676", - // fontSize: 16 - // } - // }, - // tooltip:{ - // trigger:'item', - // formatter: function (params, t, cb) { - // return params.name + " : " + formatDataSizeBytes(params.value); - // } - // }, - // series: [{ - // type: 'treemap', - // data: this.state.heatMapMemoryChart - // }] - // }) - this.setState({ memoryInit:true }) } - // else{ - // let dataCpu=this.state.chartCpu; - // let mychart1=echarts.init(this.refs.cpuLoad); - // let option=mychart1.getOption(); - // let memoryInitData=option.series; - // Object.keys(data.memoryData).map(key=>{ - // let dataCpuElement = dataCpu[key]; - // if (_.isUndefined(dataCpuElement)) { - // let op = Object.assign({}, option.series[index]); - // op.name = key.slice(0, key.indexOf(" ")); - // op.areaStyle = { - // shadowBlur: 10, - // opacity: 0.1 - // }; - // op.type = 'line'; - // dataCpu[key] = [...this.delete(dataCpuElement), 
[new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]]; - // op.data = dataCpu[key]; - // memoryInitData.push(op); - // } - // else { - // dataCpu[key] = [...this.delete(dataCpuElement), [new Date().format('yyyy-MM-dd hh:mm:ss'), (data.memoryData[key].processCpuLoad * 100).toFixed(2)]]; - // } - // for(let i=0,len=memoryInitData.length;i { - // let id = data.memoryData[key].id; - // let name = key; - // let index = _.findIndex(heatMapData, {id: id}); - // let newDataPoint = Number((data.memoryData[key].systemCpuLoad * 100).toFixed(2)); - // if (index == -1) { - // let newData = {}; - // newData.id = id; - // newData.name = name; - // newData.value = newDataPoint - // newData.dataset = [newDataPoint]; - // newData.children = []; - // heatMapData.push(newData); - // } - // else { - // let entry = heatMapData[index]; - // let dataset = entry.dataset; - // if (dataset.length >= 600) { - // dataset = dataset.splice(600 - 1, dataset.length - 600 - 1); - // } - // dataset = [...dataset, newDataPoint] - // entry.dataset = dataset; - // let sum = 0; - // for (let i = 0; i < dataset.length; i++) { - // sum += dataset[i]; - // } - // entry.value = Number((sum / dataset.length).toFixed(2)); - // } - // }); - // - // let heatMapDataSort = bubbleSort(heatMapData); - // this.state.heatMapChart = heatMapDataSort.slice(0,10); - // let heatMapChart = echarts.init(this.refs.heatMapChart, "royal"); - // let heatMapChartOption = heatMapChart.getOption(); - // heatMapChartOption.series = [{ - // type: "treemap", - // data: heatMapDataSort.slice(0,10), - // breadcrumb: { - // show: false - // } - // }]; - // heatMapChart.setOption(heatMapChartOption); - // - // //heatMap memory data - // let heatMapMemoryData = this.state.heatMapMemoryChart; - // Object.keys(data.memoryData).map(key => { - // let id = data.memoryData[key].id; - // let name = key; - // let index = _.findIndex(heatMapMemoryData, {id: id}); - // let newDataPoint = 0; - // if 
(typeof (data.memoryData[key].pools.general) != "undefined"){ - // newDataPoint += data.memoryData[key].pools.general.freeBytes; - // if (typeof (data.memoryData[key].pools.reserved) != "undefined"){ - // newDataPoint += data.memoryData[key].pools.reserved.freeBytes; - // } - // } - // newDataPoint = Number(newDataPoint); - // if (index == -1) { - // let newData = {}; - // newData.id = id; - // newData.name = name; - // newData.value = newDataPoint - // newData.dataset = [newDataPoint]; - // newData.children = []; - // heatMapMemoryData.push(newData); - // } - // else { - // let entry = heatMapMemoryData[index]; - // let dataset = entry.dataset; - // if (dataset.length >= 600) { - // dataset = dataset.splice(600 - 1, dataset.length - 600 - 1); - // } - // dataset = [...dataset, newDataPoint] - // entry.dataset = dataset; - // let sum = 0; - // for (let i = 0; i < dataset.length; i++) { - // sum += dataset[i]; - // } - // entry.value = Number((sum / dataset.length).toFixed(2)); - // } - // }); - // let heatMapMemoryDataSort = bubbleSort(heatMapMemoryData); - // this.state.heatMapMemoryChart = heatMapMemoryDataSort.slice(0,10); - // let heatMapMemoryChart = echarts.init(this.refs.heatMapMemoryChart, "royal"); - // let heatMapMemoryChartOption = heatMapMemoryChart.getOption(); - // heatMapMemoryChartOption.series = [{ - // type: "treemap", - // data: heatMapMemoryDataSort.slice(0,10), - // breadcrumb: { - // show: false - // }, - // }]; - // heatMapMemoryChart.setOption(heatMapMemoryChartOption); - let now = Date.now(); let secondsSinceLastRefresh = this.state.lastRefresh ? (now - this.state.lastRefresh) / 1000.0 : 1; secondsSinceLastRefresh = secondsSinceLastRefresh < 1 ? 
1 : secondsSinceLastRefresh; @@ -374,8 +167,6 @@ class EchartPart extends React.Component{ chart8:[...this.delete(this.state.chart8),[new Date().format('yyyy-MM-dd hh:mm:ss'),data.lineData.totalInputBytes ]], lastWorker:data.lineData.totalCpuTimeSecs, - // heatMapChart: this.state.heatMapChart, - // heatMapMemoryChart: this.state.heatMapMemoryChart, lastRefresh: now }); if (!this.refs.cpuLoad.className) { @@ -559,7 +350,6 @@ class EchartPart extends React.Component{ let mychart=echarts.init(this.refs[this.state.chartRef[i]]); let option=mychart.getOption(); option.xAxis[0].interval=60*1000*this.state.step/10; - // option.series[0].data=[]; mychart.setOption(option); } } @@ -595,12 +385,6 @@ class EchartPart extends React.Component{
- {/*
*/} - {/*
*/} - {/*
*/} - {/*
*/} - {/*
*/} - {/*
*/}
-- Gitee From 488dfd73f381321c40752095076e50d859b74f62 Mon Sep 17 00:00:00 2001 From: zhousipei Date: Mon, 27 Jun 2022 16:53:44 +0800 Subject: [PATCH 23/30] amend extension-execution-planner docs --- hetu-docs/en/admin/extension-execution-planner.md | 10 ++++++++-- hetu-docs/zh/admin/extension-execution-planner.md | 9 +++++++-- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/hetu-docs/en/admin/extension-execution-planner.md b/hetu-docs/en/admin/extension-execution-planner.md index 45a1109ab..f1dd46f96 100644 --- a/hetu-docs/en/admin/extension-execution-planner.md +++ b/hetu-docs/en/admin/extension-execution-planner.md @@ -2,12 +2,18 @@ This section describes how to add an extension physical execution planner in openLooKeng. With the extension physical execution planner, openLooKeng can utilize other operator acceleration libraries to speed up the execution of SQL statements. ## Configuration -To enable extension physical execution feature, the following configs must be added in +First, you should get the below jar package and c++ libraries from links: + +- `boostkit-omniop-openlookeng-1.6.1-1.0.0-aarch64.jar`: [Link](https://github.com/kunpengcompute/boostkit-bigdata/releases/download/v1.3.0/boostkit-omniop-openlookeng-1.6.1-1.0.0-aarch64.zip) + +- `libboostkit-omniop-xx-1.0.0-aarch.so`: [Link](https://www.hikunpeng.com/en/developer/boostkit/big-data?acclerated=3) + +Then, to enable extension physical execution feature, the following configs must be added in `config.properties`: ``` properties extension_execution_planner_enabled=true -extension_execution_planner_jar_path=file:///xxPath/omni-openLooKeng-adapter-1.6.1-SNAPSHOT.jar +extension_execution_planner_jar_path=file:///xxPath/boostkit-omniop-openlookeng-1.6.1-1.0.0-aarch64.jar extension_execution_planner_class_path=nova.hetu.olk.OmniLocalExecutionPlanner ``` diff --git a/hetu-docs/zh/admin/extension-execution-planner.md b/hetu-docs/zh/admin/extension-execution-planner.md index 
52c54736f..b3250ee35 100644 --- a/hetu-docs/zh/admin/extension-execution-planner.md +++ b/hetu-docs/zh/admin/extension-execution-planner.md @@ -2,11 +2,16 @@ 本节介绍openLooKeng如何添加扩展物理执行计划。通过物理执行计划的扩展,openLooKeng可以使用其他算子加速库来加速SQL语句的执行。 ## 配置 -在配置文件`config.properties`增加如下配置: +首先,需要从以下链接获取相关jar包和c++库: +- `boostkit-omniop-openlookeng-1.6.1-1.0.0-aarch64.jar`: [Link](https://github.com/kunpengcompute/boostkit-bigdata/releases/download/v1.3.0/boostkit-omniop-openlookeng-1.6.1-1.0.0-aarch64.zip) + +- `libboostkit-omniop-xx-1.0.0-aarch.so`: [Link](https://www.hikunpeng.com/zh/developer/boostkit/big-data?acclerated=3) + +然后,在配置文件`config.properties`增加如下配置: ``` properties extension_execution_planner_enabled=true -extension_execution_planner_jar_path=file:///xxPath/omni-openLooKeng-adapter-1.6.1-SNAPSHOT.jar +extension_execution_planner_jar_path=file:///xxPath/boostkit-omniop-openlookeng-1.6.1-1.0.0-aarch64.jar extension_execution_planner_class_path=nova.hetu.olk.OmniLocalExecutionPlanner ``` -- Gitee From 260fe8c260e6cbfae7677f0c5b1847a2a9d2eaf5 Mon Sep 17 00:00:00 2001 From: Alex Zhang Date: Tue, 5 Jul 2022 17:43:45 +0800 Subject: [PATCH 24/30] batch query - define spi interfaces for exchange manager module --- .../io/prestosql/spi/exchange/Exchange.java | 50 ++++++++++++++ .../spi/exchange/ExchangeContext.java | 34 +++++++++ .../io/prestosql/spi/exchange/ExchangeId.java | 69 +++++++++++++++++++ .../spi/exchange/ExchangeManager.java | 38 ++++++++++ .../spi/exchange/ExchangeManagerFactory.java | 24 +++++++ .../ExchangeManagerHandleResolver.java | 20 ++++++ .../prestosql/spi/exchange/ExchangeSink.java | 32 +++++++++ .../spi/exchange/ExchangeSinkHandle.java | 17 +++++ .../exchange/ExchangeSinkInstanceHandle.java | 17 +++++ .../spi/exchange/ExchangeSource.java | 38 ++++++++++ .../spi/exchange/ExchangeSourceHandle.java | 17 +++++ .../spi/exchange/ExchangeSourceSplitter.java | 17 +++++ .../exchange/ExchangeSourceStatistics.java | 26 +++++++ 13 files changed, 399 insertions(+) create 
mode 100644 presto-spi/src/main/java/io/prestosql/spi/exchange/Exchange.java create mode 100644 presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeContext.java create mode 100644 presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeId.java create mode 100644 presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeManager.java create mode 100644 presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeManagerFactory.java create mode 100644 presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeManagerHandleResolver.java create mode 100644 presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSink.java create mode 100644 presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSinkHandle.java create mode 100644 presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSinkInstanceHandle.java create mode 100644 presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSource.java create mode 100644 presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSourceHandle.java create mode 100644 presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSourceSplitter.java create mode 100644 presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSourceStatistics.java diff --git a/presto-spi/src/main/java/io/prestosql/spi/exchange/Exchange.java b/presto-spi/src/main/java/io/prestosql/spi/exchange/Exchange.java new file mode 100644 index 000000000..8e21cc0bf --- /dev/null +++ b/presto-spi/src/main/java/io/prestosql/spi/exchange/Exchange.java @@ -0,0 +1,50 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.spi.exchange; + +import java.io.Closeable; +import java.util.List; +import java.util.concurrent.CompletableFuture; + +public interface Exchange extends Closeable { + /** + * Add a new sink + * @param taskPartitionId unique partition written to a sink + * @return {@link ExchangeSinkHandle} associated with the taskPartitionId + */ + ExchangeSinkHandle addSink(int taskPartitionId); + + /** + * Called when no more sinks will be added with {@link #addSink(int)} + */ + void noMoreSinks(); + + /** + * Registers a sink instance for a task attempt. + * + * @param sinkHandle - handle returned by {@link #addSink(int)} + * @param taskAttemptId - attempt id (how many times attempted) + * @return ExchangeSinkInstanceHandle to be sent to a worker that is needed to create an {@link ExchangeSink} instance + * with {@link ExchangeManager#createSink(ExchangeSinkInstanceHandle, boolean)} + */ + ExchangeSinkInstanceHandle instantiateSink(ExchangeSinkHandle sinkHandle, int taskAttemptId); + + void sinkFinished(ExchangeSinkInstanceHandle handle); + + CompletableFuture> getSourceHandles(); + + ExchangeSourceSplitter split(ExchangeSourceHandle handle, long targetSizeInBytes); + + ExchangeSourceStatistics getExchangeSourceStatistics(ExchangeSourceHandle handle); +} diff --git a/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeContext.java b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeContext.java new file mode 100644 index 000000000..2ce464fb8 --- /dev/null +++ b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeContext.java @@ -0,0 +1,34 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.spi.exchange; + +import io.prestosql.spi.QueryId; + +public class ExchangeContext { + private final QueryId queryId; + private final ExchangeId exchangeId; + + public ExchangeContext(QueryId queryId, ExchangeId exchangeId) { + this.queryId = queryId; + this.exchangeId = exchangeId; + } + + public QueryId getQueryId() { + return queryId; + } + + public ExchangeId getExchangeId() { + return exchangeId; + } +} diff --git a/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeId.java b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeId.java new file mode 100644 index 000000000..ac4248a71 --- /dev/null +++ b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeId.java @@ -0,0 +1,69 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.spi.exchange; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; + +import java.util.Objects; +import java.util.regex.Pattern; + +import static java.util.Objects.requireNonNull; +import static java.util.UUID.randomUUID; + +public class ExchangeId { + private static final Pattern ID_PATTERN = Pattern.compile("[a-zA-Z\\d_-]+"); + + private final String id; + + @JsonCreator + public ExchangeId(String id) { + requireNonNull(id, "id is null"); + if (!ID_PATTERN.matcher(id).matches()) { + throw new IllegalArgumentException("Invalid exchange id: " + id); + } + this.id = id; + } + + public static ExchangeId createRandomExchangeId() { + return new ExchangeId(randomUUID().toString()); + } + + @JsonValue + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ExchangeId that = (ExchangeId) o; + return Objects.equals(id, that.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } + + @Override + public String toString() { + return id; + } +} diff --git a/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeManager.java b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeManager.java new file mode 100644 index 000000000..b1ddbf42b --- /dev/null +++ b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeManager.java @@ -0,0 +1,38 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.spi.exchange; + +import javax.annotation.concurrent.ThreadSafe; +import java.util.List; + +/** + * Service provider interface for an external exchange + * It's used to exchange data between stages + */ +@ThreadSafe +public interface ExchangeManager { + /** + * create an external exchange between a pair of stages + * + * @param context information about the query and stage being executed + * @param outputPartitionCount number of distinct partitions to be created by the exchange + * @return {@link Exchange} instance to be used by coordinator to interact with the external exchange + */ + Exchange createExchange(ExchangeContext context, int outputPartitionCount); + + ExchangeSink createSink(ExchangeSinkInstanceHandle handle, boolean preserveRecordsOrder); + + ExchangeSource createSource(List handles); + +} diff --git a/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeManagerFactory.java b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeManagerFactory.java new file mode 100644 index 000000000..b8ded8bf2 --- /dev/null +++ b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeManagerFactory.java @@ -0,0 +1,24 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.spi.exchange; + +import java.util.Map; + +public interface ExchangeManagerFactory { + String getName(); + + ExchangeManager create(Map config); + + ExchangeManagerHandleResolver getHandleResolver(); +} diff --git a/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeManagerHandleResolver.java b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeManagerHandleResolver.java new file mode 100644 index 000000000..118962222 --- /dev/null +++ b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeManagerHandleResolver.java @@ -0,0 +1,20 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.spi.exchange; + +public interface ExchangeManagerHandleResolver { + Class getExchangeSinkInstanceHandleClass(); + + Class getExchangeSourceHandleClass(); +} diff --git a/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSink.java b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSink.java new file mode 100644 index 000000000..186921ac6 --- /dev/null +++ b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSink.java @@ -0,0 +1,32 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.spi.exchange; + +import io.airlift.slice.Slice; + +import java.util.concurrent.CompletableFuture; + +public interface ExchangeSink { + CompletableFuture NOT_BLOCKED = CompletableFuture.completedFuture(null); + + CompletableFuture isBlocked(); + + void add(int partitionId, Slice slice); + + long getMemoryUsage(); + + CompletableFuture finish(); + + CompletableFuture abort(); +} diff --git a/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSinkHandle.java b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSinkHandle.java new file mode 100644 index 000000000..c4bf21851 --- /dev/null +++ b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSinkHandle.java @@ -0,0 +1,17 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.spi.exchange; + +public interface ExchangeSinkHandle { +} diff --git a/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSinkInstanceHandle.java b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSinkInstanceHandle.java new file mode 100644 index 000000000..69d20e9d0 --- /dev/null +++ b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSinkInstanceHandle.java @@ -0,0 +1,17 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.spi.exchange; + +public interface ExchangeSinkInstanceHandle { +} diff --git a/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSource.java b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSource.java new file mode 100644 index 000000000..72781f08d --- /dev/null +++ b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSource.java @@ -0,0 +1,38 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.spi.exchange; + +import io.airlift.slice.Slice; + +import javax.annotation.Nullable; +import javax.annotation.concurrent.ThreadSafe; +import java.io.Closeable; +import java.util.concurrent.CompletableFuture; + +@ThreadSafe +public interface ExchangeSource extends Closeable { + CompletableFuture NOT_BLOCKED = CompletableFuture.completedFuture(null); + + CompletableFuture isBlocked(); + + boolean isFinished(); + + @Nullable + Slice read(); + + long getMemoryUsage(); + + @Override + void close(); +} diff --git a/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSourceHandle.java b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSourceHandle.java new file mode 100644 index 000000000..bee185421 --- /dev/null +++ b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSourceHandle.java @@ -0,0 +1,17 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.spi.exchange; + +public interface ExchangeSourceHandle { +} diff --git a/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSourceSplitter.java b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSourceSplitter.java new file mode 100644 index 000000000..f9f4e07b8 --- /dev/null +++ b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSourceSplitter.java @@ -0,0 +1,17 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.spi.exchange; + +public class ExchangeSourceSplitter { +} diff --git a/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSourceStatistics.java b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSourceStatistics.java new file mode 100644 index 000000000..4a5da5007 --- /dev/null +++ b/presto-spi/src/main/java/io/prestosql/spi/exchange/ExchangeSourceStatistics.java @@ -0,0 +1,26 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.spi.exchange; + +public class ExchangeSourceStatistics { + private final long sizeInBytes; + + public ExchangeSourceStatistics(long sizeInBytes) { + this.sizeInBytes = sizeInBytes; + } + + public long getSizeInBytes() { + return sizeInBytes; + } +} -- Gitee From 347027439457e0023d1180401a5bbecc1e12fb58 Mon Sep 17 00:00:00 2001 From: Alex Zhang Date: Fri, 8 Jul 2022 11:45:51 +0800 Subject: [PATCH 25/30] batch query - define exchange manager module --- hetu-exchange-file-system/pom.xml | 108 ++++++++++++++++++ .../ExchangeFileSystemConfig.java | 60 ++++++++++ pom.xml | 1 + .../exchange/ExchangeManagerModule.java | 25 ++++ .../exchange/ExchangeManagerRegistry.java | 96 ++++++++++++++++ .../metadata/ExchangeHandleResolver.java | 42 +++++++ .../io/prestosql/server/PluginManager.java | 63 +++++----- .../main/java/io/prestosql/spi/Plugin.java | 90 ++++++--------- 8 files changed, 397 insertions(+), 88 deletions(-) create mode 100644 hetu-exchange-file-system/pom.xml create mode 100644 hetu-exchange-file-system/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeFileSystemConfig.java create mode 100644 presto-main/src/main/java/io/prestosql/exchange/ExchangeManagerModule.java create mode 100644 presto-main/src/main/java/io/prestosql/exchange/ExchangeManagerRegistry.java create mode 100644 presto-main/src/main/java/io/prestosql/metadata/ExchangeHandleResolver.java diff --git a/hetu-exchange-file-system/pom.xml b/hetu-exchange-file-system/pom.xml new file mode 100644 index 000000000..90ed76756 --- /dev/null +++ b/hetu-exchange-file-system/pom.xml @@ -0,0 +1,108 @@ + + + + presto-root + io.hetu.core + 1.8.0-SNAPSHOT + + 4.0.0 + + exchange-file-system + + + ${project.parent.basedir} + 2.17.2 + 13.0 + 2.0.1.Final + 3.10 + 1.10.19 + 6.10 + 2.2.3 + 0.8.2 + 3.1.1 + + + + + io.airlift + slice + provided + + + + io.airlift + units + provided + + + + io.airlift + configuration + + + io.airlift + log + + + io.airlift + bootstrap + + + 
log4j-over-slf4j + org.slf4j + + + slf4j-jdk14 + org.slf4j + + + + + io.airlift + concurrent + + + javax.inject + javax.inject + + + com.google.guava + guava + + + + com.google.inject + guice + + + + + + + org.jacoco + jacoco-maven-plugin + ${version.jacoco-maven-plugin} + + false + + + + + prepare-agent + + + + report + test + + report + + + + + + + + \ No newline at end of file diff --git a/hetu-exchange-file-system/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeFileSystemConfig.java b/hetu-exchange-file-system/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeFileSystemConfig.java new file mode 100644 index 000000000..2da0b7ddd --- /dev/null +++ b/hetu-exchange-file-system/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeFileSystemConfig.java @@ -0,0 +1,60 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.exchangefilesystem; + +import com.google.common.collect.ImmutableList; +import io.airlift.units.DataSize; + +import javax.validation.constraints.NotEmpty; +import javax.validation.constraints.NotNull; +import java.net.URI; +import java.util.Arrays; +import java.util.List; + +import static io.airlift.units.DataSize.Unit.GIGABYTE; +import static io.airlift.units.DataSize.Unit.MEGABYTE; +import static com.google.common.base.Strings.isNullOrEmpty; + +public class ExchangeFileSystemConfig { + private List baseDirectories = ImmutableList.of(); + private boolean exchangeEncryptionEnabled = true; + + private DataSize maxPageStorageSize = new DataSize(16, MEGABYTE); + private int exchangeSinkBufferPoolMinSize = 10; + private int exchangeSinkBuffersPerPartition = 2; + private DataSize exchangeSinkMaxFileSize = new DataSize(1, GIGABYTE); + private int exchangeSourceConcurrentReaders = 4; + private int maxOutputPartitionCount = 50; + private int exchangeFileListingParallelism = 50; + + @NotNull + @NotEmpty(message = "At least one base directory needs to be configured") + public List getBaseDirectories() { + return baseDirectories; + } + + public ExchangeFileSystemConfig setBaseDirectories(String baseDirectories) { + if (baseDirectories != null && isNullOrEmpty(baseDirectories)) { + ImmutableList.Builder builder = ImmutableList.builder(); + Arrays.stream(baseDirectories.split(",")).forEach(dir -> { + if(!dir.endsWith(PATH_SEPARATOR)) { + dir += PATH_SEPARATOR; + } + builder.add(URI.create(dir)); + }); + this.baseDirectories = builder.build(); + } + return this; + } +} diff --git a/pom.xml b/pom.xml index fa823205c..e99dfaef4 100644 --- a/pom.xml +++ b/pom.xml @@ -157,6 +157,7 @@ hetu-greenplum hetu-clickhouse hetu-kylin + hetu-exchange-file-system diff --git a/presto-main/src/main/java/io/prestosql/exchange/ExchangeManagerModule.java b/presto-main/src/main/java/io/prestosql/exchange/ExchangeManagerModule.java new file mode 100644 index 
000000000..295c77604 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/exchange/ExchangeManagerModule.java @@ -0,0 +1,25 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.exchange; + +import com.google.inject.Binder; +import com.google.inject.Module; +import com.google.inject.Scopes; + +public class ExchangeManagerModule implements Module { + @Override + public void configure(Binder binder) { + binder.bind(ExchangeManagerRegistry.class).in(Scopes.SINGLETON); + } +} diff --git a/presto-main/src/main/java/io/prestosql/exchange/ExchangeManagerRegistry.java b/presto-main/src/main/java/io/prestosql/exchange/ExchangeManagerRegistry.java new file mode 100644 index 000000000..755a7ed5d --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/exchange/ExchangeManagerRegistry.java @@ -0,0 +1,96 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.prestosql.exchange; + +import com.google.inject.Inject; +import io.airlift.log.Logger; +import io.prestosql.metadata.ExchangeHandleResolver; +import io.prestosql.spi.classloader.ThreadContextClassLoader; +import io.prestosql.spi.exchange.ExchangeManager; +import io.prestosql.spi.exchange.ExchangeManagerFactory; + +import java.io.File; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkState; +import static com.google.common.base.Strings.isNullOrEmpty; +import static io.airlift.configuration.ConfigurationLoader.loadPropertiesFrom; +import static java.lang.String.format; +import static java.util.Objects.requireNonNull; + +public class ExchangeManagerRegistry { + private static final Logger log = Logger.get(ExchangeManagerRegistry.class); + + private static final File CONFIG_FILE = new File("etc/exchange-manager.properties"); + private static final String EXCHANGE_MANAGER_NAME_PROPERTY = "exchange-manager.name"; + + private final ExchangeHandleResolver handleResolver; + private final Map exchangeManagerFactories = new ConcurrentHashMap<>(); + private volatile ExchangeManager exchangeManager; + + @Inject + public ExchangeManagerRegistry(ExchangeHandleResolver handleResolver) { + this.handleResolver = requireNonNull(handleResolver, "handleResolver is null"); + } + + public void addExchangeManagerFactory(ExchangeManagerFactory exchangeManagerFactory) { + requireNonNull(exchangeManagerFactory, "exchangeManagerFactory is null"); + if (exchangeManagerFactories.putIfAbsent(exchangeManagerFactory.getName(), exchangeManagerFactory) != null) { + throw new IllegalArgumentException(format("Exchange manager factory '%s' is already registered", exchangeManagerFactory.getName())); + } + } + + public void loadExchangeManager() { + 
if (!CONFIG_FILE.exists()) { + return; + } + try { + Map properties = loadPropertiesFrom(CONFIG_FILE.getPath()); + String name = properties.remove(EXCHANGE_MANAGER_NAME_PROPERTY); + checkArgument(!isNullOrEmpty(name), "Exchange manager configuration %s does not contain %s", CONFIG_FILE, EXCHANGE_MANAGER_NAME_PROPERTY); + } catch (IOException e) { + throw new UncheckedIOException("Failed to read configuration file: " + CONFIG_FILE, e); + } + } + + public synchronized void loadExchangeManager(String name, Map properties) { + log.info("-- Loading exchange manager %s --", name); + + checkState(exchangeManager == null, "exchangeManager is already loaded"); + ExchangeManagerFactory factory = exchangeManagerFactories.get(name); + checkArgument(factory != null, "Exchange manager factory '%s' is not registered. Available factories: %s", name, exchangeManagerFactories.keySet()); + + ExchangeManager exchangeManager; + try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(factory.getClass().getClassLoader())) { + exchangeManager = factory.create(properties); + } + handleResolver.setExchangeManagerHandleResolver(factory.getHandleResolver()); + + log.info("-- Loaded exchange manager %s --", name); + this.exchangeManager = exchangeManager; + } + + private Map loadProperties(File configFile) { + try { + return new HashMap<>(loadPropertiesFrom(configFile.getPath())); + } catch (IOException e) { + throw new UncheckedIOException("Failed to read configuration file: " + configFile, e); + } + } +} diff --git a/presto-main/src/main/java/io/prestosql/metadata/ExchangeHandleResolver.java b/presto-main/src/main/java/io/prestosql/metadata/ExchangeHandleResolver.java new file mode 100644 index 000000000..debcda388 --- /dev/null +++ b/presto-main/src/main/java/io/prestosql/metadata/ExchangeHandleResolver.java @@ -0,0 +1,42 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.prestosql.metadata; + +import io.prestosql.spi.exchange.ExchangeManagerHandleResolver; +import io.prestosql.spi.exchange.ExchangeSinkInstanceHandle; +import io.prestosql.spi.exchange.ExchangeSourceHandle; + +import java.util.concurrent.atomic.AtomicReference; + +import static com.google.common.base.Preconditions.checkState; + +public class ExchangeHandleResolver { + private final AtomicReference exchangeManagerHandleResolver = new AtomicReference<>(); + + public void setExchangeManagerHandleResolver(ExchangeManagerHandleResolver resolver) { + checkState(exchangeManagerHandleResolver.compareAndSet(null, resolver), "ExchangeManagerHandleResolver is already set"); + } + + public Class getExchangeSinkInstanceHandleClass() { + ExchangeManagerHandleResolver resolver = exchangeManagerHandleResolver.get(); + checkState(resolver != null, "ExchangeManagerHandleResolver is not set"); + return resolver.getExchangeSinkInstanceHandleClass(); + } + + public Class getExchangeSourceHandleClass() { + ExchangeManagerHandleResolver resolver = exchangeManagerHandleResolver.get(); + checkState(resolver != null, "ExchangeManagerHandleResolver is not set"); + return resolver.getExchangeSourceHandleClass(); + } +} diff --git a/presto-main/src/main/java/io/prestosql/server/PluginManager.java b/presto-main/src/main/java/io/prestosql/server/PluginManager.java index 3f9ff137e..6339fdc2b 100644 --- a/presto-main/src/main/java/io/prestosql/server/PluginManager.java +++ b/presto-main/src/main/java/io/prestosql/server/PluginManager.java @@ -22,6 +22,7 @@ import 
io.airlift.resolver.DefaultArtifact; import io.prestosql.connector.ConnectorManager; import io.prestosql.cube.CubeManager; import io.prestosql.eventlistener.EventListenerManager; +import io.prestosql.exchange.ExchangeManagerRegistry; import io.prestosql.execution.resourcegroups.ResourceGroupManager; import io.prestosql.failuredetector.FailureDetectorManager; import io.prestosql.failuredetector.FailureDetectorPlugin; @@ -40,6 +41,7 @@ import io.prestosql.spi.classloader.ThreadContextClassLoader; import io.prestosql.spi.connector.ConnectorFactory; import io.prestosql.spi.cube.CubeProvider; import io.prestosql.spi.eventlistener.EventListenerFactory; +import io.prestosql.spi.exchange.ExchangeManagerFactory; import io.prestosql.spi.failuredetector.FailureRetryFactory; import io.prestosql.spi.filesystem.HetuFileSystemClientFactory; import io.prestosql.spi.function.FunctionNamespaceManagerFactory; @@ -82,8 +84,7 @@ import static io.prestosql.server.PluginDiscovery.writePluginServices; import static java.util.Objects.requireNonNull; @ThreadSafe -public class PluginManager -{ +public class PluginManager { private static final ImmutableList SPI_PACKAGES = ImmutableList.builder() .add("io.hetu.core.spi.") .add("io.prestosql.spi.") @@ -115,6 +116,9 @@ public class PluginManager private final FileSystemClientManager fileSystemClientManager; private final FailureDetectorManager failureDetectorManager; private final HeuristicIndexerManager heuristicIndexerManager; + + private final ExchangeManagerRegistry exchangeManagerRegistry; + private final SessionPropertyDefaults sessionPropertyDefaults; private final ArtifactResolver resolver; private final File installedPluginsDir; @@ -142,8 +146,8 @@ public class PluginManager FileSystemClientManager fileSystemClientManager, HetuMetaStoreManager hetuMetaStoreManager, HeuristicIndexerManager heuristicIndexerManager, - FailureDetectorManager failureDetectorManager) - { + FailureDetectorManager failureDetectorManager, + 
ExchangeManagerRegistry exchangeManagerRegistry) { requireNonNull(nodeInfo, "nodeInfo is null"); requireNonNull(config, "config is null"); this.config = config; @@ -152,8 +156,7 @@ public class PluginManager externalFunctionsPluginsDir = config.getExternalFunctionsPluginsDir(); if (config.getPlugins() == null) { this.plugins = ImmutableList.of(); - } - else { + } else { this.plugins = ImmutableList.copyOf(config.getPlugins()); } this.resolver = new ArtifactResolver(config.getMavenLocalRepository(), config.getMavenRemoteRepository()); @@ -175,11 +178,11 @@ public class PluginManager this.hetuMetaStoreManager = requireNonNull(hetuMetaStoreManager, "hetuMetaStoreManager is null"); this.heuristicIndexerManager = requireNonNull(heuristicIndexerManager, "heuristicIndexerManager is null"); this.failureDetectorManager = requireNonNull(failureDetectorManager, "failureDetectorManager is null"); + this.exchangeManagerRegistry = requireNonNull(exchangeManagerRegistry, "exchangeManagerRegistry is null"); } public void loadPlugins() - throws Exception - { + throws Exception { if (!pluginsLoading.compareAndSet(false, true)) { return; } @@ -206,14 +209,12 @@ public class PluginManager } private void loadPlugin(String plugin) - throws Exception - { + throws Exception { loadPlugin(plugin, false); } private void loadPlugin(String plugin, boolean onlyInstallFunctionsPlugin) - throws Exception - { + throws Exception { log.info("-- Loading plugin %s --", plugin); URLClassLoader pluginClassLoader = buildClassLoader(plugin); try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(pluginClassLoader)) { @@ -222,8 +223,7 @@ public class PluginManager log.info("-- Finished loading plugin %s --", plugin); } - private void loadPlugin(URLClassLoader pluginClassLoader, boolean onlyInstallFunctionsPlugin) - { + private void loadPlugin(URLClassLoader pluginClassLoader, boolean onlyInstallFunctionsPlugin) { ServiceLoader serviceLoader = ServiceLoader.load(Plugin.class, 
pluginClassLoader); List pluginSet = ImmutableList.copyOf(serviceLoader); checkState(!pluginSet.isEmpty(), "No service providers of type %s", Plugin.class.getName()); @@ -244,8 +244,7 @@ public class PluginManager } } - private void installFunctionsPlugin(Plugin plugin) - { + private void installFunctionsPlugin(Plugin plugin) { for (Class functionClass : plugin.getFunctions()) { log.info("Registering functions from %s", functionClass.getName()); metadataManager.getFunctionAndTypeManager().registerBuiltInFunctions(extractFunctions(functionClass)); @@ -257,8 +256,7 @@ public class PluginManager } } - public void installPlugin(Plugin plugin) - { + public void installPlugin(Plugin plugin) { for (BlockEncoding blockEncoding : plugin.getBlockEncodings()) { log.info("Registering block encoding %s", blockEncoding.getName()); metadataManager.getFunctionAndTypeManager().addBlockEncoding(blockEncoding); @@ -357,12 +355,16 @@ public class PluginManager FailureDetectorManager.addFailureRetryFactory(failureRetryFactory); } + for (ExchangeManagerFactory exchangeManagerFactory : plugin.getExchangeManagerFactories()) { + log.info("Registering exchange manager %s", exchangeManagerFactory.getName()); + exchangeManagerRegistry.addExchangeManagerFactory(exchangeManagerFactory); + } + installFunctionsPlugin(plugin); } private URLClassLoader buildClassLoader(String plugin) - throws Exception - { + throws Exception { File file = new File(plugin); if (file.isFile() && (file.getName().equals("pom.xml") || file.getName().endsWith(".pom"))) { return buildClassLoaderFromPom(file); @@ -374,8 +376,7 @@ public class PluginManager } private URLClassLoader buildClassLoaderFromPom(File pomFile) - throws Exception - { + throws Exception { List artifacts = resolver.resolvePom(pomFile); URLClassLoader classLoader = createClassLoader(artifacts, pomFile.getPath()); @@ -389,8 +390,7 @@ public class PluginManager } private URLClassLoader buildClassLoaderFromDirectory(File dir) - throws Exception - { + 
throws Exception { log.debug("Classpath for %s:", dir.getName()); List urls = new ArrayList<>(); for (File file : listFiles(dir)) { @@ -401,16 +401,14 @@ public class PluginManager } private URLClassLoader buildClassLoaderFromCoordinates(String coordinates) - throws Exception - { + throws Exception { Artifact rootArtifact = new DefaultArtifact(coordinates); List artifacts = resolver.resolveArtifacts(rootArtifact); return createClassLoader(artifacts, rootArtifact.toString()); } private URLClassLoader createClassLoader(List artifacts, String name) - throws IOException - { + throws IOException { log.debug("Classpath for %s:", name); List urls = new ArrayList<>(); for (Artifact artifact : sortedArtifacts(artifacts)) { @@ -424,14 +422,12 @@ public class PluginManager return createClassLoader(urls); } - private URLClassLoader createClassLoader(List urls) - { + private URLClassLoader createClassLoader(List urls) { ClassLoader parent = getClass().getClassLoader(); return new PluginClassLoader(urls, parent, SPI_PACKAGES); } - private static List listFiles(File installedPluginsDir) - { + private static List listFiles(File installedPluginsDir) { if (installedPluginsDir != null && installedPluginsDir.isDirectory()) { File[] files = installedPluginsDir.listFiles(); if (files != null) { @@ -442,8 +438,7 @@ public class PluginManager return ImmutableList.of(); } - private static List sortedArtifacts(List artifacts) - { + private static List sortedArtifacts(List artifacts) { List list = new ArrayList<>(artifacts); Collections.sort(list, Ordering.natural().nullsLast().onResultOf(Artifact::getFile)); return list; diff --git a/presto-spi/src/main/java/io/prestosql/spi/Plugin.java b/presto-spi/src/main/java/io/prestosql/spi/Plugin.java index d9d8c69f3..376cc9f0a 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/Plugin.java +++ b/presto-spi/src/main/java/io/prestosql/spi/Plugin.java @@ -17,6 +17,7 @@ import io.prestosql.spi.block.BlockEncoding; import 
io.prestosql.spi.connector.ConnectorFactory; import io.prestosql.spi.cube.CubeProvider; import io.prestosql.spi.eventlistener.EventListenerFactory; +import io.prestosql.spi.exchange.ExchangeManagerFactory; import io.prestosql.spi.failuredetector.FailureRetryFactory; import io.prestosql.spi.filesystem.HetuFileSystemClientFactory; import io.prestosql.spi.function.FunctionNamespaceManagerFactory; @@ -41,127 +42,108 @@ import java.util.Set; import static java.util.Collections.emptyList; import static java.util.Collections.emptySet; -public interface Plugin -{ - default Iterable getConnectorFactories() - { +public interface Plugin { + default Iterable getConnectorFactories() { return emptyList(); } - default Iterable getBlockEncodings() - { + default Iterable getBlockEncodings() { return emptyList(); } - default Iterable getTypes() - { + default Iterable getTypes() { return emptyList(); } - default Iterable getParametricTypes() - { + default Iterable getParametricTypes() { return emptyList(); } - default Set> getFunctions() - { + default Set> getFunctions() { return emptySet(); } - default Set getDynamicHiveFunctions() - { + default Set getDynamicHiveFunctions() { return emptySet(); } - default Iterable getSystemAccessControlFactories() - { + default Iterable getSystemAccessControlFactories() { return emptyList(); } - default Iterable getGroupProviderFactories() - { + default Iterable getGroupProviderFactories() { return emptyList(); } - default Iterable getPasswordAuthenticatorFactories() - { + default Iterable getPasswordAuthenticatorFactories() { return emptyList(); } - default Iterable getEventListenerFactories() - { + default Iterable getEventListenerFactories() { return emptyList(); } - default Iterable getResourceGroupConfigurationManagerFactories() - { + default Iterable getResourceGroupConfigurationManagerFactories() { return emptyList(); } - default Iterable getSessionPropertyConfigurationManagerFactories() - { + default Iterable 
getSessionPropertyConfigurationManagerFactories() { return emptyList(); } - default Iterable getStateStoreFactories() - { + default Iterable getStateStoreFactories() { return emptyList(); } - default Iterable getStateStoreBootstrappers() - { + default Iterable getStateStoreBootstrappers() { return emptyList(); } - default Iterable getSeedStoreFactories() - { + default Iterable getSeedStoreFactories() { return emptyList(); } - default Iterable getCubeProviders() - { + default Iterable getCubeProviders() { return emptyList(); } - default Iterable getFileSystemClientFactory() - { + default Iterable getFileSystemClientFactory() { return emptyList(); } - default Iterable getFailureRetryFactory() - { + default Iterable getFailureRetryFactory() { return emptyList(); } - default Iterable getHetuMetaStoreFactories() - { + default Iterable getHetuMetaStoreFactories() { return emptyList(); } - default Iterable getIndexFactories() - { + default Iterable getIndexFactories() { return emptyList(); } - default Optional getConnectorWithProperties() - { + default Optional getConnectorWithProperties() { return Optional.empty(); } - default void setExternalFunctionsDir(File externalFuncsDir) - {} + default void setExternalFunctionsDir(File externalFuncsDir) { + } - default void setMaxFunctionRunningTimeEnable(boolean enable) - {} + default void setMaxFunctionRunningTimeEnable(boolean enable) { + } - default void setMaxFunctionRunningTimeInSec(long time) - {} + default void setMaxFunctionRunningTimeInSec(long time) { + } - default void setFunctionRunningThreadPoolSize(int size) - {} + default void setFunctionRunningThreadPoolSize(int size) { + } + + default Iterable getFunctionNamespaceManagerFactories() { + return emptyList(); + } - default Iterable getFunctionNamespaceManagerFactories() - { + default Iterable getExchangeManagerFactories() { return emptyList(); } } -- Gitee From cde9ffdbad3afdb693cca99aa1bc65c6b790504c Mon Sep 17 00:00:00 2001 From: Alex Zhang Date: Mon, 11 Jul 2022 
19:52:38 +0800 Subject: [PATCH 26/30] create exchange file system structure --- hetu-exchange-file-system/pom.xml | 108 ----- hetu-exchange-filesystem/pom.xml | 383 ++++++++++++++++++ .../exchangefilesystem/ExchangeReader.java | 35 ++ .../ExchangeSourceFile.java | 52 +++ .../exchangefilesystem/ExchangeWriter.java | 28 ++ .../exchangefilesystem/ExecutionStats.java | 97 +++++ .../plugin/exchangefilesystem/FileStatus.java | 80 ++++ .../FileSystemExchange.java | 74 ++++ .../FileSystemExchangeConfig.java | 123 +++++- .../FileSystemExchangeErrorCode.java | 38 ++ .../FileSystemExchangeFutures.java | 29 ++ .../FileSystemExchangeManager.java | 48 +++ .../FileSystemExchangeManagerFactory.java | 42 ++ .../FileSystemExchangePlugin.java | 28 ++ .../FileSystemExchangeSink.java | 55 +++ .../FileSystemExchangeSinkHandle.java | 82 ++++ .../FileSystemExchangeSourceHandle.java | 41 ++ .../FileSystemExchangeStorage.java | 42 ++ .../hdfs/ExchangeHdfsConfig.java | 18 + .../local/ExchangeLocalConfig.java | 18 + pom.xml | 2 +- 21 files changed, 1309 insertions(+), 114 deletions(-) delete mode 100644 hetu-exchange-file-system/pom.xml create mode 100644 hetu-exchange-filesystem/pom.xml create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeReader.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeSourceFile.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeWriter.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExecutionStats.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileStatus.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchange.java rename 
hetu-exchange-file-system/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeFileSystemConfig.java => hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeConfig.java (35%) create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeErrorCode.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeFutures.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManager.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManagerFactory.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangePlugin.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSink.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSinkHandle.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSourceHandle.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeStorage.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/hdfs/ExchangeHdfsConfig.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/local/ExchangeLocalConfig.java diff --git a/hetu-exchange-file-system/pom.xml b/hetu-exchange-file-system/pom.xml deleted file mode 100644 index 90ed76756..000000000 --- a/hetu-exchange-file-system/pom.xml +++ /dev/null @@ -1,108 +0,0 @@ - - - - presto-root - io.hetu.core - 1.8.0-SNAPSHOT - - 4.0.0 - - exchange-file-system - - - ${project.parent.basedir} - 2.17.2 - 13.0 - 
2.0.1.Final - 3.10 - 1.10.19 - 6.10 - 2.2.3 - 0.8.2 - 3.1.1 - - - - - io.airlift - slice - provided - - - - io.airlift - units - provided - - - - io.airlift - configuration - - - io.airlift - log - - - io.airlift - bootstrap - - - log4j-over-slf4j - org.slf4j - - - slf4j-jdk14 - org.slf4j - - - - - io.airlift - concurrent - - - javax.inject - javax.inject - - - com.google.guava - guava - - - - com.google.inject - guice - - - - - - - org.jacoco - jacoco-maven-plugin - ${version.jacoco-maven-plugin} - - false - - - - - prepare-agent - - - - report - test - - report - - - - - - - - \ No newline at end of file diff --git a/hetu-exchange-filesystem/pom.xml b/hetu-exchange-filesystem/pom.xml new file mode 100644 index 000000000..eeece0c03 --- /dev/null +++ b/hetu-exchange-filesystem/pom.xml @@ -0,0 +1,383 @@ + + + + presto-root + io.hetu.core + 1.8.0-SNAPSHOT + + 4.0.0 + + hetu-exchange-filesystem + + + ${project.parent.basedir} + 2.17.2 + 13.0 + 2.0.1.Final + 3.10 + 1.10.19 + 6.10 + 2.2.3 + 0.8.2 + 3.1.1 + + + + + org.codehaus.jettison + jettison + + + stax-api + stax + + + + + + commons-codec + commons-codec + runtime + + + + io.hetu.core + hetu-common + + + + io.prestosql.hadoop + hadoop-apache + + + + + io.hetu.core + presto-spi + provided + + + jackson-annotations + com.fasterxml.jackson.core + + + + + + com.fasterxml.jackson.core + jackson-annotations + provided + + + + io.airlift + slice + provided + + + + io.airlift + units + provided + + + + io.airlift + configuration + + + io.airlift + log + + + io.airlift + bootstrap + + + log4j-over-slf4j + org.slf4j + + + slf4j-jdk14 + org.slf4j + + + + + io.airlift + concurrent + + + io.airlift + stats + + + javax.inject + javax.inject + + + com.google.guava + guava + + + + com.google.inject + guice + + + + org.apache.commons + commons-lang3 + ${version.commons-lang3} + + + + org.apache.hbase + hbase-client + ${version.hbase} + + + error_prone_annotations + com.google.errorprone + + + jcodings + org.jruby.jcodings + + + 
hadoop-common + org.apache.hadoop + + + hadoop-auth + org.apache.hadoop + + + hbase-shaded-netty + org.apache.hbase.thirdparty + + + + + hbase-shaded-netty + org.apache.hbase.thirdparty + 4.0.0 + runtime + + + org.apache.hbase + hbase-server + ${version.hbase} + + + jersey-server + org.glassfish.jersey.core + + + jersey-container-servlet-core + org.glassfish.jersey.containers + + + jaxb-api + javax.xml.bind + + + hk2-locator + org.glassfish.hk2 + + + jersey-common + org.glassfish.jersey.core + + + hadoop-hdfs + org.apache.hadoop + + + hadoop-common + org.apache.hadoop + + + hadoop-mapreduce-client-core + org.apache.hadoop + + + hadoop-auth + org.apache.hadoop + + + hadoop-annotations + org.apache.hadoop + + + commons-logging + commons-logging + + + org.apache.hadoop + hadoop-distcp + + + jetty-util-ajax + org.eclipse.jetty + + + jetty-webapp + org.eclipse.jetty + + + log4j + log4j + + + slf4j-log4j12 + org.slf4j + + + + + org.apache.logging.log4j + log4j-api + ${dep.log4j.version} + runtime + + + org.apache.logging.log4j + log4j-core + ${dep.log4j.version} + runtime + + + org.eclipse.jetty + jetty-webapp + 9.4.46.v20220331 + runtime + + + org.apache.hadoop + hadoop-distcp + runtime + + + org.apache.hbase + hbase-protocol-shaded + ${version.hbase} + + + jackson-databind + + + jackson-annotations + com.fasterxml.jackson.core + + + com.fasterxml.jackson.core + runtime + + + + org.apache.hbase + hbase-common + ${version.hbase} + + + hadoop-common + org.apache.hadoop + + + log4j + log4j + + + slf4j-log4j12 + org.slf4j + + + + + + + org.testng + testng + test + + + + org.weakref + jmxutils + ${dep.jmxutils.version} + compile + + + + io.hetu.core + presto-tests + test + + + plexus-cipher + org.sonatype.plexus + + + plexus-classworlds + org.codehaus.plexus + + + + + + io.hetu.core + presto-plugin-toolkit + test + + + + io.hetu.core + hetu-metastore + test + + + + io.airlift + testing-mysql-server + test + + + + org.mockito + mockito-core + ${version.mockito-all} + test + + 
+ + + + + org.jacoco + jacoco-maven-plugin + ${version.jacoco-maven-plugin} + + false + + + + + prepare-agent + + + + report + test + + report + + + + + + + + \ No newline at end of file diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeReader.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeReader.java new file mode 100644 index 000000000..d683a58d9 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeReader.java @@ -0,0 +1,35 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.exchangefilesystem; + +import com.google.common.util.concurrent.ListenableFuture; +import io.airlift.slice.Slice; + +import java.io.Closeable; +import java.io.IOException; + +public interface ExchangeReader + extends Closeable +{ + Slice read() throws IOException; + + ListenableFuture isBlocked(); + + Long getRetainedSize(); + + boolean isFinished(); + + @Override + void close(); +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeSourceFile.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeSourceFile.java new file mode 100644 index 000000000..002d33187 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeSourceFile.java @@ -0,0 +1,52 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.exchangefilesystem; + +import javax.annotation.concurrent.Immutable; +import javax.crypto.SecretKey; + +import java.net.URI; +import java.util.Optional; + +import static java.util.Objects.requireNonNull; + +@Immutable +public class ExchangeSourceFile +{ + private final URI fileUri; + private final Optional secretKey; + private final long fileSize; + + public ExchangeSourceFile(URI fileUri, Optional secretKey, long fileSize) + { + this.fileUri = requireNonNull(fileUri, "fileUri is null"); + this.secretKey = requireNonNull(secretKey, "secretKey is null"); + this.fileSize = fileSize; + } + + public URI getFileUri() + { + return fileUri; + } + + public Optional getSecretKey() + { + return secretKey; + } + + public long getFileSize() + { + return fileSize; + } +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeWriter.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeWriter.java new file mode 100644 index 000000000..7dd3fff82 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeWriter.java @@ -0,0 +1,28 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.exchangefilesystem; + +import com.google.common.util.concurrent.ListenableFuture; +import io.airlift.slice.Slice; + +public interface ExchangeWriter +{ + ListenableFuture write(Slice slice); + + ListenableFuture finish(); + + ListenableFuture abort(); + + long getRetainedSize(); +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExecutionStats.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExecutionStats.java new file mode 100644 index 000000000..934d66db5 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExecutionStats.java @@ -0,0 +1,97 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.exchangefilesystem; + +import com.google.common.base.Stopwatch; +import com.google.common.util.concurrent.FutureCallback; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; +import io.airlift.stats.TimeStat; +import org.checkerframework.checker.nullness.qual.Nullable; +import org.weakref.jmx.Managed; +import org.weakref.jmx.Nested; + +import java.util.concurrent.CompletableFuture; +import java.util.function.Supplier; + +import static com.google.common.util.concurrent.MoreExecutors.directExecutor; +import static java.util.concurrent.TimeUnit.MILLISECONDS; + +public class ExecutionStats +{ + private final TimeStat finished = new TimeStat(MILLISECONDS); + private final TimeStat failed = new TimeStat(MILLISECONDS); + + public T record(Supplier call) + { + Stopwatch stopwatch = Stopwatch.createStarted(); + try { + T result = call.get(); + finished.add(stopwatch.elapsed(MILLISECONDS), MILLISECONDS); + return result; + } + catch (Throwable t) { + failed.add(stopwatch.elapsed(MILLISECONDS), MILLISECONDS); + throw t; + } + } + + public CompletableFuture record(CompletableFuture future) + { + Stopwatch stopwatch = Stopwatch.createStarted(); + future.whenComplete((value, failure) -> { + if (failure == null) { + finished.add(stopwatch.elapsed(MILLISECONDS), MILLISECONDS); + } + else { + failed.add(stopwatch.elapsed(MILLISECONDS), MILLISECONDS); + } + }); + return future; + } + + public ListenableFuture record(ListenableFuture future) + { + Stopwatch stopwatch = Stopwatch.createStarted(); + Futures.addCallback(future, new FutureCallback() + { + @Override + public void onSuccess(@Nullable T result) + { + finished.add(stopwatch.elapsed(MILLISECONDS), MILLISECONDS); + } + + @Override + public void onFailure(Throwable t) + { + failed.add(stopwatch.elapsed(MILLISECONDS), MILLISECONDS); + } + }, directExecutor()); + return future; + } + + @Managed + @Nested + public TimeStat getFinished() + 
{ + return finished; + } + + @Managed + @Nested + public TimeStat getFailed() + { + return failed; + } +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileStatus.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileStatus.java new file mode 100644 index 000000000..4bfbddc93 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileStatus.java @@ -0,0 +1,80 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.exchangefilesystem; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.openjdk.jol.info.ClassLayout; + +import javax.annotation.concurrent.Immutable; + +import java.util.Objects; + +import static com.google.common.base.MoreObjects.toStringHelper; + +@Immutable +public class FileStatus +{ + private static final int INSTANCE_SIZE = ClassLayout.parseClass(FileStatus.class).instanceSize(); + + private final String filePath; + private final long fileSize; + + @JsonCreator + public FileStatus(@JsonProperty("filePath") String filePath, @JsonProperty("fileSize") long fileSize) + { + this.filePath = filePath; + this.fileSize = fileSize; + } + + @JsonProperty + public String getFilePath() + { + return filePath; + } + + @JsonProperty + public long getFileSize() + { + return fileSize; + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + FileStatus that = (FileStatus) o; + return fileSize == that.fileSize && filePath.equals(that.filePath); + } + + @Override + public int hashCode() + { + return Objects.hash(filePath, fileSize); + } + + @Override + public String toString() + { + return toStringHelper(this) + .add("filePath", filePath) + .add("fileSize", fileSize) + .toString(); + } +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchange.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchange.java new file mode 100644 index 000000000..0b4467e13 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchange.java @@ -0,0 +1,74 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.exchangefilesystem; + +import io.prestosql.spi.exchange.Exchange; +import io.prestosql.spi.exchange.ExchangeSinkHandle; +import io.prestosql.spi.exchange.ExchangeSinkInstanceHandle; +import io.prestosql.spi.exchange.ExchangeSourceHandle; +import io.prestosql.spi.exchange.ExchangeSourceSplitter; +import io.prestosql.spi.exchange.ExchangeSourceStatistics; + +import java.io.IOException; +import java.util.List; +import java.util.concurrent.CompletableFuture; + +public class FileSystemExchange + implements Exchange +{ + @Override + public ExchangeSinkHandle addSink(int taskPartitionId) + { + return null; + } + + @Override + public void noMoreSinks() + { + } + + @Override + public ExchangeSinkInstanceHandle instantiateSink(ExchangeSinkHandle sinkHandle, int taskAttemptId) + { + return null; + } + + @Override + public void sinkFinished(ExchangeSinkInstanceHandle handle) + { + } + + @Override + public CompletableFuture> getSourceHandles() + { + return null; + } + + @Override + public ExchangeSourceSplitter split(ExchangeSourceHandle handle, long targetSizeInBytes) + { + return null; + } + + @Override + public ExchangeSourceStatistics getExchangeSourceStatistics(ExchangeSourceHandle handle) + { + return null; + } + + @Override + public void close() throws IOException + { + } +} diff --git a/hetu-exchange-file-system/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeFileSystemConfig.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeConfig.java similarity index 
35% rename from hetu-exchange-file-system/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeFileSystemConfig.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeConfig.java index 2da0b7ddd..0fdbbea4a 100644 --- a/hetu-exchange-file-system/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeFileSystemConfig.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeConfig.java @@ -14,19 +14,25 @@ package io.hetu.core.plugin.exchangefilesystem; import com.google.common.collect.ImmutableList; +import io.airlift.configuration.Config; +import io.airlift.configuration.ConfigDescription; import io.airlift.units.DataSize; +import javax.validation.constraints.Min; import javax.validation.constraints.NotEmpty; import javax.validation.constraints.NotNull; + import java.net.URI; import java.util.Arrays; import java.util.List; +import static com.google.common.base.Strings.isNullOrEmpty; import static io.airlift.units.DataSize.Unit.GIGABYTE; import static io.airlift.units.DataSize.Unit.MEGABYTE; -import static com.google.common.base.Strings.isNullOrEmpty; +import static io.hetu.core.plugin.exchangefilesystem.FileSystemExchangeManager.PATH_SEPARATOR; -public class ExchangeFileSystemConfig { +public class FileSystemExchangeConfig +{ private List baseDirectories = ImmutableList.of(); private boolean exchangeEncryptionEnabled = true; @@ -40,15 +46,19 @@ public class ExchangeFileSystemConfig { @NotNull @NotEmpty(message = "At least one base directory needs to be configured") - public List getBaseDirectories() { + public List getBaseDirectories() + { return baseDirectories; } - public ExchangeFileSystemConfig setBaseDirectories(String baseDirectories) { + @Config("exchange.base-directories") + @ConfigDescription("List of base directories separated by comma") + public FileSystemExchangeConfig setBaseDirectories(String baseDirectories) + { if (baseDirectories != 
null && isNullOrEmpty(baseDirectories)) { ImmutableList.Builder builder = ImmutableList.builder(); Arrays.stream(baseDirectories.split(",")).forEach(dir -> { - if(!dir.endsWith(PATH_SEPARATOR)) { + if (!dir.endsWith(PATH_SEPARATOR)) { dir += PATH_SEPARATOR; } builder.add(URI.create(dir)); @@ -57,4 +67,107 @@ public class ExchangeFileSystemConfig { } return this; } + + public boolean isExchangeEncryptionEnabled() + { + return exchangeEncryptionEnabled; + } + + @Config("exchange.encryption-enabled") + public FileSystemExchangeConfig setExchangeEncryptionEnabled(boolean exchangeEncryptionEnabled) + { + this.exchangeEncryptionEnabled = exchangeEncryptionEnabled; + return this; + } + + public DataSize getMaxPageStorageSize() + { + return maxPageStorageSize; + } + + @Config("exchange.max-page-storage-size") + @ConfigDescription("Max storage size of a page written to a sink, including the page itself and its size represented by an int") + public FileSystemExchangeConfig setMaxPageStorageSize(DataSize maxPageStorageSize) + { + this.maxPageStorageSize = maxPageStorageSize; + return this; + } + + public int getExchangeSinkBufferPoolMinSize() + { + return exchangeSinkBufferPoolMinSize; + } + + @Config("exchange.sink-buffer-pool-min-size") + public FileSystemExchangeConfig setExchangeSinkBufferPoolMinSize(int exchangeSinkBufferPoolMinSize) + { + this.exchangeSinkBufferPoolMinSize = exchangeSinkBufferPoolMinSize; + return this; + } + + @Min(2) + public int getExchangeSinkBuffersPerPartition() + { + return exchangeSinkBuffersPerPartition; + } + + @Config("exchange.sink-buffers-per-partition") + public FileSystemExchangeConfig setExchangeSinkBuffersPerPartition(int exchangeSinkBuffersPerPartition) + { + this.exchangeSinkBuffersPerPartition = exchangeSinkBuffersPerPartition; + return this; + } + + public DataSize getExchangeSinkMaxFileSize() + { + return exchangeSinkMaxFileSize; + } + + @Config("exchange.sink-max-file-size") + @ConfigDescription("Max size of files written by 
sinks") + public FileSystemExchangeConfig setExchangeSinkMaxFileSize(DataSize exchangeSinkMaxFileSize) + { + this.exchangeSinkMaxFileSize = exchangeSinkMaxFileSize; + return this; + } + + @Min(1) + public int getExchangeSourceConcurrentReaders() + { + return exchangeSourceConcurrentReaders; + } + + @Config("exchange.source-concurrent-readers") + public FileSystemExchangeConfig setExchangeSourceConcurrentReaders(int exchangeSourceConcurrentReaders) + { + this.exchangeSourceConcurrentReaders = exchangeSourceConcurrentReaders; + return this; + } + + @Min(1) + public int getMaxOutputPartitionCount() + { + return maxOutputPartitionCount; + } + + @Config("exchange.max-output-partition-count") + public FileSystemExchangeConfig setMaxOutputPartitionCount(int maxOutputPartitionCount) + { + this.maxOutputPartitionCount = maxOutputPartitionCount; + return this; + } + + @Min(1) + public int getExchangeFileListingParallelism() + { + return exchangeFileListingParallelism; + } + + @Config("exchange.file-listing-parallelism") + @ConfigDescription("Max parallelism of file listing calls when enumerating spooling files.") + public FileSystemExchangeConfig setExchangeFileListingParallelism(int exchangeFileListingParallelism) + { + this.exchangeFileListingParallelism = exchangeFileListingParallelism; + return this; + } } diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeErrorCode.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeErrorCode.java new file mode 100644 index 000000000..3f0e22ceb --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeErrorCode.java @@ -0,0 +1,38 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.exchangefilesystem; + +import io.prestosql.spi.ErrorCode; +import io.prestosql.spi.ErrorCodeSupplier; +import io.prestosql.spi.ErrorType; + +import static io.prestosql.spi.ErrorType.USER_ERROR; + +public enum FileSystemExchangeErrorCode + implements ErrorCodeSupplier +{ + MAX_OUTPUT_PARTITION_COUNT_EXCEEDED(0, USER_ERROR),; + private final ErrorCode errorCode; + + FileSystemExchangeErrorCode(int code, ErrorType type) + { + errorCode = new ErrorCode(code + 0x0510_0000, name(), type); + } + + @Override + public ErrorCode toErrorCode() + { + return errorCode; + } +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeFutures.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeFutures.java new file mode 100644 index 000000000..09808f71c --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeFutures.java @@ -0,0 +1,29 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.exchangefilesystem; + +import com.google.common.util.concurrent.ListenableFuture; + + +public class FileSystemExchangeFutures +{ + private FileSystemExchangeFutures() + { + } + + public static ListenableFuture translateFailures(ListenableFuture listenableFuture) + { + return null; + } +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManager.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManager.java new file mode 100644 index 000000000..f126aae8b --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManager.java @@ -0,0 +1,48 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.exchangefilesystem; + +import io.prestosql.spi.exchange.Exchange; +import io.prestosql.spi.exchange.ExchangeContext; +import io.prestosql.spi.exchange.ExchangeManager; +import io.prestosql.spi.exchange.ExchangeSink; +import io.prestosql.spi.exchange.ExchangeSinkInstanceHandle; +import io.prestosql.spi.exchange.ExchangeSource; +import io.prestosql.spi.exchange.ExchangeSourceHandle; + +import java.util.List; + +public class FileSystemExchangeManager + implements ExchangeManager +{ + public static final String PATH_SEPARATOR = "/"; + + @Override + public Exchange createExchange(ExchangeContext context, int outputPartitionCount) + { + return null; + } + + @Override + public ExchangeSink createSink(ExchangeSinkInstanceHandle handle, boolean preserveRecordsOrder) + { + return null; + } + + @Override + public ExchangeSource createSource(List handles) + { + return null; + } +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManagerFactory.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManagerFactory.java new file mode 100644 index 000000000..dbc2e1537 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManagerFactory.java @@ -0,0 +1,42 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.exchangefilesystem; + +import io.prestosql.spi.exchange.ExchangeManager; +import io.prestosql.spi.exchange.ExchangeManagerFactory; +import io.prestosql.spi.exchange.ExchangeManagerHandleResolver; + +import java.util.Map; + +public class FileSystemExchangeManagerFactory + implements ExchangeManagerFactory +{ + @Override + public String getName() + { + return null; + } + + @Override + public ExchangeManager create(Map config) + { + return null; + } + + @Override + public ExchangeManagerHandleResolver getHandleResolver() + { + return null; + } +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangePlugin.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangePlugin.java new file mode 100644 index 000000000..80861a7bb --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangePlugin.java @@ -0,0 +1,28 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.exchangefilesystem; + +import com.google.common.collect.ImmutableList; +import io.prestosql.spi.Plugin; +import io.prestosql.spi.exchange.ExchangeManagerFactory; + +public class FileSystemExchangePlugin + implements Plugin +{ + @Override + public Iterable getExchangeManagerFactories() + { + return ImmutableList.of(new FileSystemExchangeManagerFactory()); + } +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSink.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSink.java new file mode 100644 index 000000000..dd8c558c7 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSink.java @@ -0,0 +1,55 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.exchangefilesystem; + +import io.airlift.slice.Slice; +import io.prestosql.spi.exchange.ExchangeSink; + +import java.util.concurrent.CompletableFuture; + +public class FileSystemExchangeSink + implements ExchangeSink +{ + public static final String COMMITTED_MARKER_FILE_NAME = "committed"; + public static final String DATA_FILE_SUFFIX = ".data"; + + @Override + public CompletableFuture isBlocked() + { + return null; + } + + @Override + public void add(int partitionId, Slice slice) + { + } + + @Override + public long getMemoryUsage() + { + return 0; + } + + @Override + public CompletableFuture finish() + { + return null; + } + + @Override + public CompletableFuture abort() + { + return null; + } +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSinkHandle.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSinkHandle.java new file mode 100644 index 000000000..40a468ad0 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSinkHandle.java @@ -0,0 +1,82 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.exchangefilesystem; + +import com.fasterxml.jackson.annotation.JsonProperty; +import io.prestosql.spi.exchange.ExchangeSinkHandle; + +import java.util.Arrays; +import java.util.Objects; +import java.util.Optional; + +import static com.google.common.base.MoreObjects.toStringHelper; +import static java.util.Objects.requireNonNull; + +public class FileSystemExchangeSinkHandle + implements ExchangeSinkHandle +{ + private final int partitionId; + private final Optional secretKey; + + public FileSystemExchangeSinkHandle(@JsonProperty("partitionId") int partitionId, @JsonProperty("secretKey") Optional secretKey) + { + this.partitionId = partitionId; + this.secretKey = requireNonNull(secretKey, "secretKey is null"); + } + + @JsonProperty + public int getPartitionId() + { + return partitionId; + } + + @JsonProperty + public Optional getSecretKey() + { + return secretKey; + } + + @Override + public boolean equals(Object o) + { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + FileSystemExchangeSinkHandle that = (FileSystemExchangeSinkHandle) o; + if (secretKey.isPresent() && that.secretKey.isPresent()) { + return partitionId == that.partitionId && Arrays.equals(secretKey.get(), that.secretKey.get()); + } + else { + return partitionId == that.partitionId && !secretKey.isPresent() && !that.secretKey.isPresent(); + } + } + + @Override + public int hashCode() + { + return Objects.hash(partitionId, secretKey); + } + + @Override + public String toString() + { + return toStringHelper(this) + .add("partitionId", partitionId) + .add("secretKey", secretKey.map(val -> "[EDITED]")) + .toString(); + } +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSourceHandle.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSourceHandle.java new file mode 100644 index 000000000..b343c7cbf --- 
/dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSourceHandle.java @@ -0,0 +1,41 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.exchangefilesystem; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.google.common.collect.ImmutableList; +import io.prestosql.spi.exchange.ExchangeSourceHandle; +import org.openjdk.jol.info.ClassLayout; + +import java.util.List; +import java.util.Optional; + +import static java.util.Objects.requireNonNull; + +public class FileSystemExchangeSourceHandle + implements ExchangeSourceHandle +{ + private static final int INSTANCE_SIZE = ClassLayout.parseClass(FileSystemExchangeSourceHandle.class).instanceSize(); + + private final int partitionId; + private final Optional secretKey; + private List files; + + public FileSystemExchangeSourceHandle(@JsonProperty("partitionId") int partitionId, @JsonProperty("files") List files, @JsonProperty("secretKey") Optional secretKey) + { + this.partitionId = partitionId; + this.files = ImmutableList.copyOf(requireNonNull(files, "files is null")); + this.secretKey = requireNonNull(secretKey, "secretKey is null"); + } +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeStorage.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeStorage.java new file mode 100644 index 
000000000..d73aa262c --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeStorage.java @@ -0,0 +1,42 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.exchangefilesystem; + +import com.google.common.util.concurrent.ListenableFuture; + +import javax.crypto.SecretKey; + +import java.io.IOException; +import java.net.URI; +import java.util.List; +import java.util.Queue; + +public interface FileSystemExchangeStorage + extends AutoCloseable +{ + ExchangeReader createExchangeReader(Queue sourceFiles, int maxPageSize); + + ExchangeWriter createExchangeWriter(URI file, SecretKey secretKey); + + ListenableFuture createEmptyFile(URI file); + + ListenableFuture deleteRecursively(List directories); + + ListenableFuture> listFilesRecursively(URI dir); + + int getWriterBufferSize(); + + @Override + void close() throws IOException; +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/hdfs/ExchangeHdfsConfig.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/hdfs/ExchangeHdfsConfig.java new file mode 100644 index 000000000..4d7f760eb --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/hdfs/ExchangeHdfsConfig.java @@ -0,0 +1,18 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.exchangefilesystem.hdfs; + +public class ExchangeHdfsConfig +{ +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/local/ExchangeLocalConfig.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/local/ExchangeLocalConfig.java new file mode 100644 index 000000000..331b7c352 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/local/ExchangeLocalConfig.java @@ -0,0 +1,18 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.exchangefilesystem.local; + +public class ExchangeLocalConfig +{ +} diff --git a/pom.xml b/pom.xml index e99dfaef4..54b632d4f 100644 --- a/pom.xml +++ b/pom.xml @@ -157,7 +157,7 @@ hetu-greenplum hetu-clickhouse hetu-kylin - hetu-exchange-file-system + hetu-exchange-filesystem -- Gitee From cd5436d50a82007ca2b79f859f840e39c3529be0 Mon Sep 17 00:00:00 2001 From: Alex Zhang Date: Mon, 11 Jul 2022 20:24:44 +0800 Subject: [PATCH 27/30] fix code style issues --- .../filesystem}/ExchangeReader.java | 2 +- .../filesystem}/ExchangeSourceFile.java | 2 +- .../filesystem}/ExchangeWriter.java | 2 +- .../filesystem}/ExecutionStats.java | 2 +- .../filesystem}/FileStatus.java | 2 +- .../filesystem}/FileSystemExchange.java | 2 +- .../filesystem}/FileSystemExchangeConfig.java | 4 +- .../FileSystemExchangeErrorCode.java | 2 +- .../FileSystemExchangeFutures.java | 3 +- .../FileSystemExchangeManager.java | 2 +- .../FileSystemExchangeManagerFactory.java | 2 +- .../filesystem}/FileSystemExchangePlugin.java | 2 +- .../filesystem}/FileSystemExchangeSink.java | 2 +- .../FileSystemExchangeSinkHandle.java | 2 +- .../FileSystemExchangeSourceHandle.java | 2 +- .../FileSystemExchangeStorage.java | 2 +- .../filesystem}/hdfs/ExchangeHdfsConfig.java | 2 +- .../local/ExchangeLocalConfig.java | 2 +- .../metadata/ExchangeHandleResolver.java | 42 ---------- .../io/prestosql/server/PluginManager.java | 81 +++++++++++------- .../buffer/TestPartitionedOutputBuffer.java | 11 +++ .../main/java/io/prestosql/spi/Plugin.java | 84 ++++++++++++------- 22 files changed, 135 insertions(+), 122 deletions(-) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/ExchangeReader.java (95%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/ExchangeSourceFile.java (96%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => 
exchange/filesystem}/ExchangeWriter.java (94%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/ExecutionStats.java (98%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/FileStatus.java (97%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/FileSystemExchange.java (97%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/FileSystemExchangeConfig.java (97%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/FileSystemExchangeErrorCode.java (95%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/FileSystemExchangeFutures.java (94%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/FileSystemExchangeManager.java (96%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/FileSystemExchangeManagerFactory.java (95%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/FileSystemExchangePlugin.java (95%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/FileSystemExchangeSink.java (96%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/FileSystemExchangeSinkHandle.java (98%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/FileSystemExchangeSourceHandle.java (97%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/FileSystemExchangeStorage.java (96%) rename 
hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/hdfs/ExchangeHdfsConfig.java (91%) rename hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/{exchangefilesystem => exchange/filesystem}/local/ExchangeLocalConfig.java (91%) delete mode 100644 presto-main/src/main/java/io/prestosql/metadata/ExchangeHandleResolver.java diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeReader.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/ExchangeReader.java similarity index 95% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeReader.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/ExchangeReader.java index d683a58d9..d30eb624c 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeReader.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/ExchangeReader.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import com.google.common.util.concurrent.ListenableFuture; import io.airlift.slice.Slice; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeSourceFile.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/ExchangeSourceFile.java similarity index 96% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeSourceFile.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/ExchangeSourceFile.java index 002d33187..f1111f1af 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeSourceFile.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/ExchangeSourceFile.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import javax.annotation.concurrent.Immutable; import javax.crypto.SecretKey; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeWriter.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/ExchangeWriter.java similarity index 94% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeWriter.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/ExchangeWriter.java index 7dd3fff82..53ed0df40 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExchangeWriter.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/ExchangeWriter.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import com.google.common.util.concurrent.ListenableFuture; import io.airlift.slice.Slice; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExecutionStats.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/ExecutionStats.java similarity index 98% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExecutionStats.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/ExecutionStats.java index 934d66db5..6be996704 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/ExecutionStats.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/ExecutionStats.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the 
License. */ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import com.google.common.base.Stopwatch; import com.google.common.util.concurrent.FutureCallback; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileStatus.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileStatus.java similarity index 97% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileStatus.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileStatus.java index 4bfbddc93..028ccbe42 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileStatus.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileStatus.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchange.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchange.java similarity index 97% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchange.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchange.java index 0b4467e13..d00fa6029 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchange.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchange.java @@ -11,7 +11,7 @@ * See the License for the specific language 
governing permissions and * limitations under the License. */ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import io.prestosql.spi.exchange.Exchange; import io.prestosql.spi.exchange.ExchangeSinkHandle; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeConfig.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeConfig.java similarity index 97% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeConfig.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeConfig.java index 0fdbbea4a..4ee733277 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeConfig.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeConfig.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import com.google.common.collect.ImmutableList; import io.airlift.configuration.Config; @@ -29,7 +29,7 @@ import java.util.List; import static com.google.common.base.Strings.isNullOrEmpty; import static io.airlift.units.DataSize.Unit.GIGABYTE; import static io.airlift.units.DataSize.Unit.MEGABYTE; -import static io.hetu.core.plugin.exchangefilesystem.FileSystemExchangeManager.PATH_SEPARATOR; +import static io.hetu.core.plugin.exchange.filesystem.FileSystemExchangeManager.PATH_SEPARATOR; public class FileSystemExchangeConfig { diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeErrorCode.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeErrorCode.java similarity index 95% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeErrorCode.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeErrorCode.java index 3f0e22ceb..f72cd0f55 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeErrorCode.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeErrorCode.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import io.prestosql.spi.ErrorCode; import io.prestosql.spi.ErrorCodeSupplier; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeFutures.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeFutures.java similarity index 94% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeFutures.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeFutures.java index 09808f71c..e442229d3 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeFutures.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeFutures.java @@ -11,11 +11,10 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import com.google.common.util.concurrent.ListenableFuture; - public class FileSystemExchangeFutures { private FileSystemExchangeFutures() diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManager.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeManager.java similarity index 96% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManager.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeManager.java index f126aae8b..728b78d1b 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManager.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeManager.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import io.prestosql.spi.exchange.Exchange; import io.prestosql.spi.exchange.ExchangeContext; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManagerFactory.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeManagerFactory.java similarity index 95% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManagerFactory.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeManagerFactory.java index dbc2e1537..cdaee371e 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeManagerFactory.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeManagerFactory.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import io.prestosql.spi.exchange.ExchangeManager; import io.prestosql.spi.exchange.ExchangeManagerFactory; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangePlugin.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangePlugin.java similarity index 95% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangePlugin.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangePlugin.java index 80861a7bb..c93acd5f6 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangePlugin.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangePlugin.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import com.google.common.collect.ImmutableList; import io.prestosql.spi.Plugin; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSink.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeSink.java similarity index 96% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSink.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeSink.java index dd8c558c7..9489b73ab 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSink.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeSink.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import io.airlift.slice.Slice; import io.prestosql.spi.exchange.ExchangeSink; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSinkHandle.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeSinkHandle.java similarity index 98% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSinkHandle.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeSinkHandle.java index 40a468ad0..00eb2b258 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSinkHandle.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeSinkHandle.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import com.fasterxml.jackson.annotation.JsonProperty; import io.prestosql.spi.exchange.ExchangeSinkHandle; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSourceHandle.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeSourceHandle.java similarity index 97% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSourceHandle.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeSourceHandle.java index b343c7cbf..d4276ec67 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeSourceHandle.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeSourceHandle.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableList; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeStorage.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeStorage.java similarity index 96% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeStorage.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeStorage.java index d73aa262c..2851fdbc9 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/FileSystemExchangeStorage.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeStorage.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.hetu.core.plugin.exchangefilesystem; +package io.hetu.core.plugin.exchange.filesystem; import com.google.common.util.concurrent.ListenableFuture; diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/hdfs/ExchangeHdfsConfig.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/hdfs/ExchangeHdfsConfig.java similarity index 91% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/hdfs/ExchangeHdfsConfig.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/hdfs/ExchangeHdfsConfig.java index 4d7f760eb..b667bc3e3 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/hdfs/ExchangeHdfsConfig.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/hdfs/ExchangeHdfsConfig.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package io.hetu.core.plugin.exchangefilesystem.hdfs; +package io.hetu.core.plugin.exchange.filesystem.hdfs; public class ExchangeHdfsConfig { diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/local/ExchangeLocalConfig.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/local/ExchangeLocalConfig.java similarity index 91% rename from hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/local/ExchangeLocalConfig.java rename to hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/local/ExchangeLocalConfig.java index 331b7c352..9d7241c23 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchangefilesystem/local/ExchangeLocalConfig.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/local/ExchangeLocalConfig.java @@ -11,7 +11,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package io.hetu.core.plugin.exchangefilesystem.local; +package io.hetu.core.plugin.exchange.filesystem.local; public class ExchangeLocalConfig { diff --git a/presto-main/src/main/java/io/prestosql/metadata/ExchangeHandleResolver.java b/presto-main/src/main/java/io/prestosql/metadata/ExchangeHandleResolver.java deleted file mode 100644 index debcda388..000000000 --- a/presto-main/src/main/java/io/prestosql/metadata/ExchangeHandleResolver.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package io.prestosql.metadata; - -import io.prestosql.spi.exchange.ExchangeManagerHandleResolver; -import io.prestosql.spi.exchange.ExchangeSinkInstanceHandle; -import io.prestosql.spi.exchange.ExchangeSourceHandle; - -import java.util.concurrent.atomic.AtomicReference; - -import static com.google.common.base.Preconditions.checkState; - -public class ExchangeHandleResolver { - private final AtomicReference exchangeManagerHandleResolver = new AtomicReference<>(); - - public void setExchangeManagerHandleResolver(ExchangeManagerHandleResolver resolver) { - checkState(exchangeManagerHandleResolver.compareAndSet(null, resolver), "ExchangeManagerHandleResolver is already set"); - } - - public Class getExchangeSinkInstanceHandleClass() { - ExchangeManagerHandleResolver resolver = exchangeManagerHandleResolver.get(); - checkState(resolver != null, "ExchangeManagerHandleResolver is not set"); - return resolver.getExchangeSinkInstanceHandleClass(); - } - - public Class getExchangeSourceHandleClass() { - ExchangeManagerHandleResolver resolver = exchangeManagerHandleResolver.get(); - checkState(resolver != null, "ExchangeManagerHandleResolver is not set"); - return resolver.getExchangeSourceHandleClass(); - } -} diff --git a/presto-main/src/main/java/io/prestosql/server/PluginManager.java b/presto-main/src/main/java/io/prestosql/server/PluginManager.java index 6339fdc2b..545a8e6f1 100644 --- a/presto-main/src/main/java/io/prestosql/server/PluginManager.java +++ b/presto-main/src/main/java/io/prestosql/server/PluginManager.java @@ -84,7 +84,8 @@ import static io.prestosql.server.PluginDiscovery.writePluginServices; import static java.util.Objects.requireNonNull; @ThreadSafe -public class PluginManager { +public class PluginManager +{ private static final ImmutableList SPI_PACKAGES = ImmutableList.builder() .add("io.hetu.core.spi.") .add("io.prestosql.spi.") @@ -147,7 
+148,8 @@ public class PluginManager { HetuMetaStoreManager hetuMetaStoreManager, HeuristicIndexerManager heuristicIndexerManager, FailureDetectorManager failureDetectorManager, - ExchangeManagerRegistry exchangeManagerRegistry) { + ExchangeManagerRegistry exchangeManagerRegistry) + { requireNonNull(nodeInfo, "nodeInfo is null"); requireNonNull(config, "config is null"); this.config = config; @@ -156,7 +158,8 @@ public class PluginManager { externalFunctionsPluginsDir = config.getExternalFunctionsPluginsDir(); if (config.getPlugins() == null) { this.plugins = ImmutableList.of(); - } else { + } + else { this.plugins = ImmutableList.copyOf(config.getPlugins()); } this.resolver = new ArtifactResolver(config.getMavenLocalRepository(), config.getMavenRemoteRepository()); @@ -181,8 +184,28 @@ public class PluginManager { this.exchangeManagerRegistry = requireNonNull(exchangeManagerRegistry, "exchangeManagerRegistry is null"); } + private static List listFiles(File installedPluginsDir) + { + if (installedPluginsDir != null && installedPluginsDir.isDirectory()) { + File[] files = installedPluginsDir.listFiles(); + if (files != null) { + Arrays.sort(files); + return ImmutableList.copyOf(files); + } + } + return ImmutableList.of(); + } + + private static List sortedArtifacts(List artifacts) + { + List list = new ArrayList<>(artifacts); + Collections.sort(list, Ordering.natural().nullsLast().onResultOf(Artifact::getFile)); + return list; + } + public void loadPlugins() - throws Exception { + throws Exception + { if (!pluginsLoading.compareAndSet(false, true)) { return; } @@ -209,12 +232,14 @@ public class PluginManager { } private void loadPlugin(String plugin) - throws Exception { + throws Exception + { loadPlugin(plugin, false); } private void loadPlugin(String plugin, boolean onlyInstallFunctionsPlugin) - throws Exception { + throws Exception + { log.info("-- Loading plugin %s --", plugin); URLClassLoader pluginClassLoader = buildClassLoader(plugin); try 
(ThreadContextClassLoader ignored = new ThreadContextClassLoader(pluginClassLoader)) { @@ -223,7 +248,8 @@ public class PluginManager { log.info("-- Finished loading plugin %s --", plugin); } - private void loadPlugin(URLClassLoader pluginClassLoader, boolean onlyInstallFunctionsPlugin) { + private void loadPlugin(URLClassLoader pluginClassLoader, boolean onlyInstallFunctionsPlugin) + { ServiceLoader serviceLoader = ServiceLoader.load(Plugin.class, pluginClassLoader); List pluginSet = ImmutableList.copyOf(serviceLoader); checkState(!pluginSet.isEmpty(), "No service providers of type %s", Plugin.class.getName()); @@ -244,7 +270,8 @@ public class PluginManager { } } - private void installFunctionsPlugin(Plugin plugin) { + private void installFunctionsPlugin(Plugin plugin) + { for (Class functionClass : plugin.getFunctions()) { log.info("Registering functions from %s", functionClass.getName()); metadataManager.getFunctionAndTypeManager().registerBuiltInFunctions(extractFunctions(functionClass)); @@ -256,7 +283,8 @@ public class PluginManager { } } - public void installPlugin(Plugin plugin) { + public void installPlugin(Plugin plugin) + { for (BlockEncoding blockEncoding : plugin.getBlockEncodings()) { log.info("Registering block encoding %s", blockEncoding.getName()); metadataManager.getFunctionAndTypeManager().addBlockEncoding(blockEncoding); @@ -364,7 +392,8 @@ public class PluginManager { } private URLClassLoader buildClassLoader(String plugin) - throws Exception { + throws Exception + { File file = new File(plugin); if (file.isFile() && (file.getName().equals("pom.xml") || file.getName().endsWith(".pom"))) { return buildClassLoaderFromPom(file); @@ -376,7 +405,8 @@ public class PluginManager { } private URLClassLoader buildClassLoaderFromPom(File pomFile) - throws Exception { + throws Exception + { List artifacts = resolver.resolvePom(pomFile); URLClassLoader classLoader = createClassLoader(artifacts, pomFile.getPath()); @@ -390,7 +420,8 @@ public class 
PluginManager { } private URLClassLoader buildClassLoaderFromDirectory(File dir) - throws Exception { + throws Exception + { log.debug("Classpath for %s:", dir.getName()); List urls = new ArrayList<>(); for (File file : listFiles(dir)) { @@ -401,14 +432,16 @@ public class PluginManager { } private URLClassLoader buildClassLoaderFromCoordinates(String coordinates) - throws Exception { + throws Exception + { Artifact rootArtifact = new DefaultArtifact(coordinates); List artifacts = resolver.resolveArtifacts(rootArtifact); return createClassLoader(artifacts, rootArtifact.toString()); } private URLClassLoader createClassLoader(List artifacts, String name) - throws IOException { + throws IOException + { log.debug("Classpath for %s:", name); List urls = new ArrayList<>(); for (Artifact artifact : sortedArtifacts(artifacts)) { @@ -422,25 +455,9 @@ public class PluginManager { return createClassLoader(urls); } - private URLClassLoader createClassLoader(List urls) { + private URLClassLoader createClassLoader(List urls) + { ClassLoader parent = getClass().getClassLoader(); return new PluginClassLoader(urls, parent, SPI_PACKAGES); } - - private static List listFiles(File installedPluginsDir) { - if (installedPluginsDir != null && installedPluginsDir.isDirectory()) { - File[] files = installedPluginsDir.listFiles(); - if (files != null) { - Arrays.sort(files); - return ImmutableList.copyOf(files); - } - } - return ImmutableList.of(); - } - - private static List sortedArtifacts(List artifacts) { - List list = new ArrayList<>(artifacts); - Collections.sort(list, Ordering.natural().nullsLast().onResultOf(Artifact::getFile)); - return list; - } } diff --git a/presto-main/src/test/java/io/prestosql/execution/buffer/TestPartitionedOutputBuffer.java b/presto-main/src/test/java/io/prestosql/execution/buffer/TestPartitionedOutputBuffer.java index 76d9a937f..2fd357304 100644 --- a/presto-main/src/test/java/io/prestosql/execution/buffer/TestPartitionedOutputBuffer.java +++ 
b/presto-main/src/test/java/io/prestosql/execution/buffer/TestPartitionedOutputBuffer.java @@ -26,6 +26,7 @@ import io.prestosql.snapshot.RecoveryUtils; import io.prestosql.snapshot.SnapshotStateId; import io.prestosql.snapshot.TaskSnapshotManager; import io.prestosql.spi.Page; +import io.prestosql.spi.exchange.ExchangeSinkInstanceHandle; import io.prestosql.spi.snapshot.MarkerPage; import io.prestosql.spi.snapshot.SnapshotTestUtil; import io.prestosql.spi.type.BigintType; @@ -1074,4 +1075,14 @@ public class TestPartitionedOutputBuffer List pages = ImmutableList.builder().add(firstPage).add(otherPages).build(); return createBufferResult(token, pages); } + + @Test + public void testBuffersWithExchangeSink() + { + OutputBuffers buffers = createInitialEmptyOutputBuffers(PARTITIONED); + assertFalse(buffers.getExchangeSinkInstanceHandle().isPresent()); + + buffers.setExchangeSinkInstanceHandle(new ExchangeSinkInstanceHandle() {}); + assertTrue(buffers.getExchangeSinkInstanceHandle().isPresent()); + } } diff --git a/presto-spi/src/main/java/io/prestosql/spi/Plugin.java b/presto-spi/src/main/java/io/prestosql/spi/Plugin.java index 376cc9f0a..dd1809ced 100644 --- a/presto-spi/src/main/java/io/prestosql/spi/Plugin.java +++ b/presto-spi/src/main/java/io/prestosql/spi/Plugin.java @@ -42,108 +42,136 @@ import java.util.Set; import static java.util.Collections.emptyList; import static java.util.Collections.emptySet; -public interface Plugin { - default Iterable getConnectorFactories() { +public interface Plugin +{ + default Iterable getConnectorFactories() + { return emptyList(); } - default Iterable getBlockEncodings() { + default Iterable getBlockEncodings() + { return emptyList(); } - default Iterable getTypes() { + default Iterable getTypes() + { return emptyList(); } - default Iterable getParametricTypes() { + default Iterable getParametricTypes() + { return emptyList(); } - default Set> getFunctions() { + default Set> getFunctions() + { return emptySet(); } - default Set 
getDynamicHiveFunctions() { + default Set getDynamicHiveFunctions() + { return emptySet(); } - default Iterable getSystemAccessControlFactories() { + default Iterable getSystemAccessControlFactories() + { return emptyList(); } - default Iterable getGroupProviderFactories() { + default Iterable getGroupProviderFactories() + { return emptyList(); } - default Iterable getPasswordAuthenticatorFactories() { + default Iterable getPasswordAuthenticatorFactories() + { return emptyList(); } - default Iterable getEventListenerFactories() { + default Iterable getEventListenerFactories() + { return emptyList(); } - default Iterable getResourceGroupConfigurationManagerFactories() { + default Iterable getResourceGroupConfigurationManagerFactories() + { return emptyList(); } - default Iterable getSessionPropertyConfigurationManagerFactories() { + default Iterable getSessionPropertyConfigurationManagerFactories() + { return emptyList(); } - default Iterable getStateStoreFactories() { + default Iterable getStateStoreFactories() + { return emptyList(); } - default Iterable getStateStoreBootstrappers() { + default Iterable getStateStoreBootstrappers() + { return emptyList(); } - default Iterable getSeedStoreFactories() { + default Iterable getSeedStoreFactories() + { return emptyList(); } - default Iterable getCubeProviders() { + default Iterable getCubeProviders() + { return emptyList(); } - default Iterable getFileSystemClientFactory() { + default Iterable getFileSystemClientFactory() + { return emptyList(); } - default Iterable getFailureRetryFactory() { + default Iterable getFailureRetryFactory() + { return emptyList(); } - default Iterable getHetuMetaStoreFactories() { + default Iterable getHetuMetaStoreFactories() + { return emptyList(); } - default Iterable getIndexFactories() { + default Iterable getIndexFactories() + { return emptyList(); } - default Optional getConnectorWithProperties() { + default Optional getConnectorWithProperties() + { return Optional.empty(); } - 
default void setExternalFunctionsDir(File externalFuncsDir) { + default void setExternalFunctionsDir(File externalFuncsDir) + { } - default void setMaxFunctionRunningTimeEnable(boolean enable) { + default void setMaxFunctionRunningTimeEnable(boolean enable) + { } - default void setMaxFunctionRunningTimeInSec(long time) { + default void setMaxFunctionRunningTimeInSec(long time) + { } - default void setFunctionRunningThreadPoolSize(int size) { + default void setFunctionRunningThreadPoolSize(int size) + { } - default Iterable getFunctionNamespaceManagerFactories() { + default Iterable getFunctionNamespaceManagerFactories() + { return emptyList(); } - default Iterable getExchangeManagerFactories() { + default Iterable getExchangeManagerFactories() + { return emptyList(); } } -- Gitee From ab12b7ded5774c74f721b4bdfb227327ec4e490d Mon Sep 17 00:00:00 2001 From: zhangfuqiang Date: Tue, 12 Jul 2022 14:00:55 +0800 Subject: [PATCH 28/30] fix build issues - remove used undeclared dependencies, unused declared dependencies --- hetu-exchange-filesystem/pom.xml | 200 +++---------------------------- 1 file changed, 15 insertions(+), 185 deletions(-) diff --git a/hetu-exchange-filesystem/pom.xml b/hetu-exchange-filesystem/pom.xml index eeece0c03..c2884eafa 100644 --- a/hetu-exchange-filesystem/pom.xml +++ b/hetu-exchange-filesystem/pom.xml @@ -16,40 +16,36 @@ 2.17.2 13.0 2.0.1.Final - 3.10 1.10.19 6.10 - 2.2.3 0.8.2 3.1.1 - org.codehaus.jettison - jettison - - - stax-api - stax - - + com.google.code.findbugs + jsr305 + true - - commons-codec - commons-codec - runtime + javax.validation + validation-api - - io.hetu.core - hetu-common + org.checkerframework + checker-qual + 2.5.2 + + + org.openjdk.jol + jol-core - io.prestosql.hadoop - hadoop-apache + commons-codec + commons-codec + runtime @@ -87,156 +83,15 @@ io.airlift configuration - - io.airlift - log - - - io.airlift - bootstrap - - - log4j-over-slf4j - org.slf4j - - - slf4j-jdk14 - org.slf4j - - - - - io.airlift - concurrent 
- io.airlift stats - - javax.inject - javax.inject - com.google.guava guava - - com.google.inject - guice - - - - org.apache.commons - commons-lang3 - ${version.commons-lang3} - - - - org.apache.hbase - hbase-client - ${version.hbase} - - - error_prone_annotations - com.google.errorprone - - - jcodings - org.jruby.jcodings - - - hadoop-common - org.apache.hadoop - - - hadoop-auth - org.apache.hadoop - - - hbase-shaded-netty - org.apache.hbase.thirdparty - - - - - hbase-shaded-netty - org.apache.hbase.thirdparty - 4.0.0 - runtime - - - org.apache.hbase - hbase-server - ${version.hbase} - - - jersey-server - org.glassfish.jersey.core - - - jersey-container-servlet-core - org.glassfish.jersey.containers - - - jaxb-api - javax.xml.bind - - - hk2-locator - org.glassfish.hk2 - - - jersey-common - org.glassfish.jersey.core - - - hadoop-hdfs - org.apache.hadoop - - - hadoop-common - org.apache.hadoop - - - hadoop-mapreduce-client-core - org.apache.hadoop - - - hadoop-auth - org.apache.hadoop - - - hadoop-annotations - org.apache.hadoop - - - commons-logging - commons-logging - - - org.apache.hadoop - hadoop-distcp - - - jetty-util-ajax - org.eclipse.jetty - - - jetty-webapp - org.eclipse.jetty - - - log4j - log4j - - - slf4j-log4j12 - org.slf4j - - - org.apache.logging.log4j log4j-api @@ -260,11 +115,6 @@ hadoop-distcp runtime - - org.apache.hbase - hbase-protocol-shaded - ${version.hbase} - jackson-databind @@ -277,26 +127,6 @@ runtime - - org.apache.hbase - hbase-common - ${version.hbase} - - - hadoop-common - org.apache.hadoop - - - log4j - log4j - - - slf4j-log4j12 - org.slf4j - - - - org.testng -- Gitee From f6407188f1b9e394d63bb03a9889ec205930ef8b Mon Sep 17 00:00:00 2001 From: zhangfuqiang Date: Tue, 12 Jul 2022 14:01:55 +0800 Subject: [PATCH 29/30] fix build issues - add ExchangeManagerRegistry for instantiate PluginManager --- .../src/main/java/io/prestosql/testing/LocalQueryRunner.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git 
a/presto-main/src/main/java/io/prestosql/testing/LocalQueryRunner.java b/presto-main/src/main/java/io/prestosql/testing/LocalQueryRunner.java index 47450f16b..6eb67453b 100644 --- a/presto-main/src/main/java/io/prestosql/testing/LocalQueryRunner.java +++ b/presto-main/src/main/java/io/prestosql/testing/LocalQueryRunner.java @@ -50,6 +50,7 @@ import io.prestosql.cost.TaskCountEstimator; import io.prestosql.cube.CubeManager; import io.prestosql.dynamicfilter.DynamicFilterCacheManager; import io.prestosql.eventlistener.EventListenerManager; +import io.prestosql.exchange.ExchangeManagerRegistry; import io.prestosql.execution.CommentTask; import io.prestosql.execution.CommitTask; import io.prestosql.execution.CreateTableTask; @@ -128,6 +129,7 @@ import io.prestosql.spi.Plugin; import io.prestosql.spi.connector.CatalogName; import io.prestosql.spi.connector.ConnectorFactory; import io.prestosql.spi.connector.QualifiedObjectName; +import io.prestosql.spi.exchange.ExchangeHandleResolver; import io.prestosql.spi.failuredetector.FailureRetryPolicy; import io.prestosql.spi.metadata.TableHandle; import io.prestosql.spi.operator.ReuseExchangeOperator; @@ -455,7 +457,8 @@ public class LocalQueryRunner fileSystemClientManager, hetuMetaStoreManager, heuristicIndexerManager, - failureDetectorManager); + failureDetectorManager, + new ExchangeManagerRegistry(new ExchangeHandleResolver())); connectorManager.addConnectorFactory(globalSystemConnectorFactory); connectorManager.createConnection(GlobalSystemConnector.NAME, GlobalSystemConnector.NAME, ImmutableMap.of()); -- Gitee From 314e226e614140734a0f29c574ae8dbd3123a851 Mon Sep 17 00:00:00 2001 From: zhangfuqiang Date: Wed, 13 Jul 2022 09:48:59 +0800 Subject: [PATCH 30/30] Exchange Filesystem plugin --- hetu-exchange-filesystem/pom.xml | 42 ++++----- .../FileSystemExchangeManagerFactory.java | 37 +++++++- .../filesystem/FileSystemExchangeModule.java | 58 +++++++++++++ .../FileSystemExchangeSinkInstanceHandle.java | 21 +++++ 
.../filesystem/FileSystemExchangeStats.java | 86 +++++++++++++++++++ .../hdfs/HdfsFileSystemExchangeStorage.java | 73 ++++++++++++++++ .../local/LocalFileSystemExchangeStorage.java | 73 ++++++++++++++++ hetu-server/src/main/provisio/hetu.xml | 5 ++ .../prestosql/metadata/HandleJsonModule.java | 2 + presto-plugin-toolkit/pom.xml | 4 + .../base/jmx/ObjectNameGeneratorConfig.java | 33 +++++++ .../jmx/PrefixObjectNameGeneratorModule.java | 82 ++++++++++++++++++ 12 files changed, 488 insertions(+), 28 deletions(-) create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeModule.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeSinkInstanceHandle.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeStats.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/hdfs/HdfsFileSystemExchangeStorage.java create mode 100644 hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/local/LocalFileSystemExchangeStorage.java create mode 100644 presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/ObjectNameGeneratorConfig.java create mode 100644 presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/PrefixObjectNameGeneratorModule.java diff --git a/hetu-exchange-filesystem/pom.xml b/hetu-exchange-filesystem/pom.xml index c2884eafa..27853241c 100644 --- a/hetu-exchange-filesystem/pom.xml +++ b/hetu-exchange-filesystem/pom.xml @@ -10,6 +10,7 @@ 4.0.0 hetu-exchange-filesystem + hetu-plugin ${project.parent.basedir} @@ -41,7 +42,6 @@ org.openjdk.jol jol-core - commons-codec commons-codec @@ -87,10 +87,18 @@ io.airlift stats + + io.airlift + bootstrap + com.google.guava guava + + com.google.inject + guice + org.apache.logging.log4j @@ -157,12 +165,6 @@ - - io.hetu.core - presto-plugin-toolkit - test - - 
io.hetu.core hetu-metastore @@ -181,31 +183,21 @@ ${version.mockito-all} test + + io.hetu.core + presto-plugin-toolkit + - org.jacoco - jacoco-maven-plugin - ${version.jacoco-maven-plugin} + org.gaul + modernizer-maven-plugin + 1.7.1 - false + true - - - - prepare-agent - - - - report - test - - report - - - diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeManagerFactory.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeManagerFactory.java index cdaee371e..92b5cc77e 100644 --- a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeManagerFactory.java +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeManagerFactory.java @@ -13,30 +13,61 @@ */ package io.hetu.core.plugin.exchange.filesystem; +import com.google.inject.Injector; +import io.airlift.bootstrap.Bootstrap; +import io.prestosql.plugin.base.jmx.MBeanServerModule; +import io.prestosql.plugin.base.jmx.PrefixObjectNameGeneratorModule; import io.prestosql.spi.exchange.ExchangeManager; import io.prestosql.spi.exchange.ExchangeManagerFactory; import io.prestosql.spi.exchange.ExchangeManagerHandleResolver; +import io.prestosql.spi.exchange.ExchangeSinkInstanceHandle; +import io.prestosql.spi.exchange.ExchangeSourceHandle; +import org.weakref.jmx.guice.MBeanModule; import java.util.Map; +import static java.util.Objects.requireNonNull; + public class FileSystemExchangeManagerFactory implements ExchangeManagerFactory { @Override public String getName() { - return null; + return "filesystem"; } @Override public ExchangeManager create(Map config) { - return null; + requireNonNull(config, "config is null"); + Bootstrap app = new Bootstrap( + new MBeanModule(), + new MBeanServerModule(), + new PrefixObjectNameGeneratorModule("io.hetu.core.plugin.exchange.filesystem", "hetu.core.plugin.exchange.filesystem"), + new 
FileSystemExchangeModule()); + Injector injector = app.doNotInitializeLogging() + .setRequiredConfigurationProperties(config) + .initialize(); + return injector.getInstance(FileSystemExchangeManager.class); } @Override public ExchangeManagerHandleResolver getHandleResolver() { - return null; + return new ExchangeManagerHandleResolver() + { + @Override + public Class getExchangeSinkInstanceHandleClass() + { + return FileSystemExchangeSinkInstanceHandle.class; + } + + @Override + public Class getExchangeSourceHandleHandleClass() + { + return FileSystemExchangeSourceHandle.class; + } + }; } } diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeModule.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeModule.java new file mode 100644 index 000000000..fb8e23062 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeModule.java @@ -0,0 +1,58 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package io.hetu.core.plugin.exchange.filesystem; + +import com.google.common.collect.ImmutableSet; +import com.google.inject.Binder; +import com.google.inject.Scopes; +import io.airlift.configuration.AbstractConfigurationAwareModule; +import io.hetu.core.plugin.exchange.filesystem.hdfs.HdfsFileSystemExchangeStorage; +import io.hetu.core.plugin.exchange.filesystem.local.LocalFileSystemExchangeStorage; +import io.prestosql.spi.PrestoException; + +import java.net.URI; +import java.util.List; + +import static io.prestosql.spi.StandardErrorCode.CONFIGURATION_INVALID; +import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED; +import static java.lang.String.format; +import static org.weakref.jmx.guice.ExportBinder.newExporter; + +public class FileSystemExchangeModule + extends AbstractConfigurationAwareModule +{ + @Override + protected void setup(Binder binder) + { + binder.bind(FileSystemExchangeStats.class).in(Scopes.SINGLETON); + newExporter(binder).export(FileSystemExchangeStats.class).withGeneratedName(); + + binder.bind(FileSystemExchangeManager.class).in(Scopes.SINGLETON); + + List baseDirectories = buildConfigObject(FileSystemExchangeConfig.class).getBaseDirectories(); + if (baseDirectories.stream().map(URI::getScheme).distinct().count() != 1) { + throw new PrestoException(CONFIGURATION_INVALID, "Multiple schemes in exchange base directories"); + } + String scheme = baseDirectories.get(0).getScheme(); + if (scheme == null || scheme.equals("file")) { + binder.bind(FileSystemExchangeStorage.class).to(LocalFileSystemExchangeStorage.class).in(Scopes.SINGLETON); + } + else if (ImmutableSet.of("hdfs").contains(scheme)) { + binder.bind(FileSystemExchangeStorage.class).to(HdfsFileSystemExchangeStorage.class).in(Scopes.SINGLETON); + } + else { + throw new PrestoException(NOT_SUPPORTED, format("Scheme %s is not supported as exchange storage", scheme)); + } + } +} diff --git 
a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeSinkInstanceHandle.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeSinkInstanceHandle.java new file mode 100644 index 000000000..603f7fb34 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeSinkInstanceHandle.java @@ -0,0 +1,21 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.exchange.filesystem; + +import io.prestosql.spi.exchange.ExchangeSinkInstanceHandle; + +public class FileSystemExchangeSinkInstanceHandle + implements ExchangeSinkInstanceHandle +{ +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeStats.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeStats.java new file mode 100644 index 000000000..b0bd83156 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/FileSystemExchangeStats.java @@ -0,0 +1,86 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.hetu.core.plugin.exchange.filesystem;
+
+import io.airlift.stats.DistributionStat;
+import org.weakref.jmx.Managed;
+import org.weakref.jmx.Nested;
+
+/**
+ * JMX statistics holder for the file-system exchange plugin.
+ *
+ * <p>Each {@code ExecutionStats} field tracks one exchange lifecycle operation
+ * (ExecutionStats is a plugin-local type not visible in this file; presumably
+ * it records call counts/timings — confirm against its declaration).
+ * {@code fileSizeInBytes} records the size distribution of spooled exchange files.
+ * The {@code @Managed @Nested} getters let jmxutils export each nested stats
+ * object as a JMX attribute named after the getter.
+ */
+public class FileSystemExchangeStats
+{
+    // All stats objects are created eagerly so the exported MBean attributes
+    // exist from construction time onward.
+    private final ExecutionStats createExchangeSourceHandles = new ExecutionStats();
+    private final ExecutionStats getCommittedPartitions = new ExecutionStats();
+    private final ExecutionStats closeExchange = new ExecutionStats();
+    private final ExecutionStats exchangeSinkBlocked = new ExecutionStats();
+    private final ExecutionStats exchangeSinkFinished = new ExecutionStats();
+    private final ExecutionStats exchangeSinkAborted = new ExecutionStats();
+    private final ExecutionStats exchangeSourceBlocked = new ExecutionStats();
+    private final DistributionStat fileSizeInBytes = new DistributionStat();
+
+    @Managed
+    @Nested
+    public ExecutionStats getCreateExchangeSourceHandles()
+    {
+        return createExchangeSourceHandles;
+    }
+
+    @Managed
+    @Nested
+    public ExecutionStats getGetCommittedPartitions()
+    {
+        return getCommittedPartitions;
+    }
+
+    @Managed
+    @Nested
+    public ExecutionStats getCloseExchange()
+    {
+        return closeExchange;
+    }
+
+    @Managed
+    @Nested
+    public ExecutionStats getExchangeSinkBlocked()
+    {
+        return exchangeSinkBlocked;
+    }
+
+    @Managed
+    @Nested
+    public ExecutionStats getExchangeSinkFinished()
+    {
+        return exchangeSinkFinished;
+    }
+
+    @Managed
+    @Nested
+    public ExecutionStats getExchangeSinkAborted()
+    {
+        return exchangeSinkAborted;
+    }
+
+    @Managed
+    @Nested
+    public ExecutionStats getExchangeSourceBlocked()
+    {
+        return exchangeSourceBlocked;
+    }
+
+    @Managed
+    @Nested
+    public DistributionStat getFileSizeInBytes()
+    {
+        return fileSizeInBytes;
+    }
+}
diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/hdfs/HdfsFileSystemExchangeStorage.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/hdfs/HdfsFileSystemExchangeStorage.java
new file mode 100644
index 000000000..28b748177
--- /dev/null
+++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/hdfs/HdfsFileSystemExchangeStorage.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package io.hetu.core.plugin.exchange.filesystem.hdfs; + +import com.google.common.util.concurrent.ListenableFuture; +import io.hetu.core.plugin.exchange.filesystem.ExchangeReader; +import io.hetu.core.plugin.exchange.filesystem.ExchangeSourceFile; +import io.hetu.core.plugin.exchange.filesystem.ExchangeWriter; +import io.hetu.core.plugin.exchange.filesystem.FileStatus; +import io.hetu.core.plugin.exchange.filesystem.FileSystemExchangeStorage; + +import javax.crypto.SecretKey; + +import java.io.IOException; +import java.net.URI; +import java.util.List; +import java.util.Queue; + +public class HdfsFileSystemExchangeStorage + implements FileSystemExchangeStorage +{ + @Override + public ExchangeReader createExchangeReader(Queue sourceFiles, int maxPageSize) + { + return null; + } + + @Override + public ExchangeWriter createExchangeWriter(URI file, SecretKey secretKey) + { + return null; + } + + @Override + public ListenableFuture createEmptyFile(URI file) + { + return null; + } + + @Override + public ListenableFuture deleteRecursively(List directories) + { + return null; + } + + @Override + public ListenableFuture> listFilesRecursively(URI dir) + { + return null; + } + + @Override + public int getWriterBufferSize() + { + return 0; + } + + @Override + public void close() throws IOException + { + } +} diff --git a/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/local/LocalFileSystemExchangeStorage.java b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/local/LocalFileSystemExchangeStorage.java new file mode 100644 index 000000000..814f329d6 --- /dev/null +++ b/hetu-exchange-filesystem/src/main/java/io/hetu/core/plugin/exchange/filesystem/local/LocalFileSystemExchangeStorage.java @@ -0,0 +1,73 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package io.hetu.core.plugin.exchange.filesystem.local; + +import com.google.common.util.concurrent.ListenableFuture; +import io.hetu.core.plugin.exchange.filesystem.ExchangeReader; +import io.hetu.core.plugin.exchange.filesystem.ExchangeSourceFile; +import io.hetu.core.plugin.exchange.filesystem.ExchangeWriter; +import io.hetu.core.plugin.exchange.filesystem.FileStatus; +import io.hetu.core.plugin.exchange.filesystem.FileSystemExchangeStorage; + +import javax.crypto.SecretKey; + +import java.io.IOException; +import java.net.URI; +import java.util.List; +import java.util.Queue; + +public class LocalFileSystemExchangeStorage + implements FileSystemExchangeStorage +{ + @Override + public ExchangeReader createExchangeReader(Queue sourceFiles, int maxPageSize) + { + return null; + } + + @Override + public ExchangeWriter createExchangeWriter(URI file, SecretKey secretKey) + { + return null; + } + + @Override + public ListenableFuture createEmptyFile(URI file) + { + return null; + } + + @Override + public ListenableFuture deleteRecursively(List directories) + { + return null; + } + + @Override + public ListenableFuture> listFilesRecursively(URI dir) + { + return null; + } + + @Override + public int getWriterBufferSize() + { + return 0; + } + + @Override + public void close() throws IOException + { + } +} diff --git a/hetu-server/src/main/provisio/hetu.xml b/hetu-server/src/main/provisio/hetu.xml index d61f35f6b..f5ea79328 100644 --- a/hetu-server/src/main/provisio/hetu.xml +++ b/hetu-server/src/main/provisio/hetu.xml @@ -327,4 +327,9 @@ + + + + + 
diff --git a/presto-main/src/main/java/io/prestosql/metadata/HandleJsonModule.java b/presto-main/src/main/java/io/prestosql/metadata/HandleJsonModule.java index 18b1797b5..28de72069 100644 --- a/presto-main/src/main/java/io/prestosql/metadata/HandleJsonModule.java +++ b/presto-main/src/main/java/io/prestosql/metadata/HandleJsonModule.java @@ -29,6 +29,7 @@ import io.prestosql.spi.connector.ConnectorTableLayoutHandle; import io.prestosql.spi.connector.ConnectorTransactionHandle; import io.prestosql.spi.connector.ConnectorUpdateTableHandle; import io.prestosql.spi.connector.ConnectorVacuumTableHandle; +import io.prestosql.spi.exchange.ExchangeHandleResolver; public class HandleJsonModule implements Module @@ -37,6 +38,7 @@ public class HandleJsonModule public void configure(Binder binder) { binder.bind(HandleResolver.class).in(Scopes.SINGLETON); + binder.bind(ExchangeHandleResolver.class).in(Scopes.SINGLETON); } @ProvidesIntoSet diff --git a/presto-plugin-toolkit/pom.xml b/presto-plugin-toolkit/pom.xml index 5c767718b..f0fb5828e 100644 --- a/presto-plugin-toolkit/pom.xml +++ b/presto-plugin-toolkit/pom.xml @@ -104,5 +104,9 @@ assertj-core test + + org.weakref + jmxutils + diff --git a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/ObjectNameGeneratorConfig.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/ObjectNameGeneratorConfig.java new file mode 100644 index 000000000..1b6ccd204 --- /dev/null +++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/ObjectNameGeneratorConfig.java @@ -0,0 +1,33 @@ +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.prestosql.plugin.base.jmx;
+
+import io.airlift.configuration.Config;
+
+/**
+ * Airlift configuration holder for the JMX domain prefix.
+ *
+ * <p>The {@code jmx.base-name} property overrides the default JMX domain base;
+ * when unset, {@link #getDomainBase()} returns {@code null} and the consumer
+ * falls back to its own built-in default.
+ */
+public class ObjectNameGeneratorConfig
+{
+    // null means "not configured"; consumers must supply their own default
+    private String domainBase;
+
+    public String getDomainBase()
+    {
+        return domainBase;
+    }
+
+    // Fluent setter (returns this) per airlift config-bean convention.
+    @Config("jmx.base-name")
+    public ObjectNameGeneratorConfig setDomainBase(String domainBase)
+    {
+        this.domainBase = domainBase;
+        return this;
+    }
+}
diff --git a/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/PrefixObjectNameGeneratorModule.java b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/PrefixObjectNameGeneratorModule.java
new file mode 100644
index 000000000..f9d16c0e9
--- /dev/null
+++ b/presto-plugin-toolkit/src/main/java/io/prestosql/plugin/base/jmx/PrefixObjectNameGeneratorModule.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package io.prestosql.plugin.base.jmx; + +import com.google.inject.Binder; +import com.google.inject.Module; +import com.google.inject.Provides; +import org.weakref.jmx.ObjectNameBuilder; +import org.weakref.jmx.ObjectNameGenerator; + +import java.util.Map; + +import static com.google.common.base.MoreObjects.firstNonNull; +import static io.airlift.configuration.ConfigBinder.configBinder; +import static java.util.Objects.requireNonNull; + +public class PrefixObjectNameGeneratorModule + implements Module +{ + private final String packageName; + private final String defaultDomainBase; + + public PrefixObjectNameGeneratorModule(String packageName, String defaultDomainBase) + { + this.packageName = requireNonNull(packageName, "packageName is null"); + this.defaultDomainBase = requireNonNull(defaultDomainBase, "defaultDomainBase is null"); + } + + @Override + public void configure(Binder binder) + { + configBinder(binder).bindConfig(ObjectNameGeneratorConfig.class); + } + + @Provides + public ObjectNameGenerator createPrefixObjectNameGenerator(ObjectNameGeneratorConfig config) + { + String domainBase = firstNonNull(config.getDomainBase(), defaultDomainBase); + return new PrefixObjectNameGenerator(packageName, domainBase); + } + + public static final class PrefixObjectNameGenerator + implements ObjectNameGenerator + { + private final String packageName; + private final String domainBase; + + public PrefixObjectNameGenerator(String packageName, String domainBase) + { + this.packageName = requireNonNull(packageName, "packageName is null"); + this.domainBase = requireNonNull(domainBase, "domainBase is null"); + } + + @Override + public String generatedNameOf(Class type, Map properties) + { + return new ObjectNameBuilder(toDomain(type)) + .withProperties(properties) + .build(); + } + + private String toDomain(Class type) + { + String domain = type.getPackage().getName(); + if (domain.startsWith(packageName)) { + domain = domainBase + 
domain.substring(packageName.length()); + } + return domain; + } + } +} -- Gitee