From e05e6284524d7f46b7c33a898b907a9b573b6f7b Mon Sep 17 00:00:00 2001
From: zhousipei
Date: Wed, 3 Aug 2022 16:25:19 +0800
Subject: [PATCH] add operator benchmark

---
 .../hetu/olk/block/BenchmarkOlkBlock.java | 154 +++++
 .../hetu/olk/block/BenchmarkOmniBlock.java | 156 +++++
 .../AbstractOperatorBenchmarkContext.java | 294 +++++++++
 .../BenchmarkBuildOffHeapOmniOperator.java | 128 ++++
 .../BenchmarkBuildOnHeapOmniOperator.java | 115 ++++
 .../BenchmarkDistinctLimitOlkOperator.java | 140 +++++
 .../BenchmarkDistinctLimitOmniOperator.java | 142 +++++
 .../BenchmarkFilterAndProjectOlkOperator.java | 300 +++++++++
 ...BenchmarkFilterAndProjectOmniOperator.java | 304 +++++++++
 .../BenchmarkHashAggregationOlkOperator.java | 355 +++++++++++
 .../BenchmarkHashAggregationOmniOperator.java | 391 ++++++++++++
 .../BenchmarkHashJoinOlkOperators.java | 590 +++++++++++++++++
 .../BenchmarkHashJoinOmniOperators.java | 592 ++++++++++++++++++
 .../benchmark/BenchmarkLimitOlkOperator.java | 133 ++++
 .../benchmark/BenchmarkLimitOmniOperator.java | 134 ++++
 .../BenchmarkOlkFilterAndProject.java | 189 ++++++
 .../BenchmarkOmniFilterAndProject.java | 198 ++++++
 .../BenchmarkOrderByOlkOperator.java | 156 +++++
 .../BenchmarkOrderByOmniOperator.java | 151 +++++
 .../operator/benchmark/BenchmarkRunner.java | 57 ++
 .../benchmark/BenchmarkTopNOlkOperator.java | 156 +++++
 .../benchmark/BenchmarkTopNOmniOperator.java | 150 +++++
 .../benchmark/BenchmarkWindowOlkOperator.java | 254 ++++++++
 .../BenchmarkWindowOmniOperator.java | 265 ++++++++
 .../olk/operator/benchmark/BlockUtil.java | 32 +-
 .../operator/benchmark/PageBuilderUtil.java | 9 +
 26 files changed, 5543 insertions(+), 2 deletions(-)
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/block/BenchmarkOlkBlock.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/block/BenchmarkOmniBlock.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/AbstractOperatorBenchmarkContext.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkBuildOffHeapOmniOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkBuildOnHeapOmniOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkDistinctLimitOlkOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkDistinctLimitOmniOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkFilterAndProjectOlkOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkFilterAndProjectOmniOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashAggregationOlkOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashAggregationOmniOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashJoinOlkOperators.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashJoinOmniOperators.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkLimitOlkOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkLimitOmniOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOlkFilterAndProject.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOmniFilterAndProject.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOrderByOlkOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOrderByOmniOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkRunner.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkTopNOlkOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkTopNOmniOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkWindowOlkOperator.java
 create mode 100644 omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkWindowOmniOperator.java

diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/block/BenchmarkOlkBlock.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/block/BenchmarkOlkBlock.java
new file mode 100644
index 000000000..c8c3502d8
--- /dev/null
+++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/block/BenchmarkOlkBlock.java
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package nova.hetu.olk.block; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.spi.Page; +import io.prestosql.spi.block.Block; +import io.prestosql.spi.type.Type; +import nova.hetu.olk.operator.benchmark.PageBuilderUtil; +import nova.hetu.olk.tool.BlockUtils; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; +import static org.openjdk.jmh.annotations.Level.Invocation; +import static org.openjdk.jmh.annotations.Scope.Thread; + +@State(Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkOlkBlock +{ + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("long", ImmutableList.of(BIGINT)) + .put("int", ImmutableList.of(INTEGER)).put("double", ImmutableList.of(DOUBLE)) + .put("varchar", ImmutableList.of(createVarcharType(50))).build(); + + private static final int TOTAL_PAGES = 100; + private static final int ROWS_PER_PAGE = 10000; + private static final int[] POSITIONS; + + static { + POSITIONS = new int[ROWS_PER_PAGE / 2]; + for (int i = 0; i < POSITIONS.length; i++) { + POSITIONS[i] = i * 2; + } + } + + @State(Thread) + public static class Context + { + @Param({"int", "long", "double", "varchar"}) + String dataType; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + private List pages; + + private List> newBlocks; + + @Setup(Invocation) + public void setup() + { + pages = generateTestData(); + newBlocks = new LinkedList<>(); + } + + @TearDown(Invocation) + public void cleanup() + { + for (Page page : pages) { + BlockUtils.freePage(page); + } + + for (Block block : newBlocks) { + block.close(); + } + } + + private List generateTestData() + { + List typesArray = INPUT_TYPES.get(dataType); + List pages = new ArrayList<>(); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(typesArray, ROWS_PER_PAGE)); + } + else { + pages.add(PageBuilderUtil.createSequencePage(typesArray, ROWS_PER_PAGE)); + } + } + return pages; + } + + public List getPages() + { + return pages; + } + } + + @Benchmark + public List> blockCopyPositions(Context context) + { + for (Page page : context.getPages()) { + int positionCount = page.getPositionCount(); + int channelCount = 
page.getChannelCount(); + for (int i = 0; i < channelCount; i++) { + Block block = page.getBlock(i).copyPositions(POSITIONS, 0, positionCount / 2); + context.newBlocks.add(block); + } + } + return context.newBlocks; + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkOlkBlock.class.getSimpleName() + ".*").build(); + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/block/BenchmarkOmniBlock.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/block/BenchmarkOmniBlock.java new file mode 100644 index 000000000..346f35bd6 --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/block/BenchmarkOmniBlock.java @@ -0,0 +1,156 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package nova.hetu.olk.block; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.spi.Page; +import io.prestosql.spi.block.Block; +import io.prestosql.spi.type.Type; +import nova.hetu.olk.operator.benchmark.PageBuilderUtil; +import nova.hetu.olk.tool.BlockUtils; +import nova.hetu.olk.tool.OperatorUtils; +import nova.hetu.omniruntime.vector.VecAllocator; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; +import static org.openjdk.jmh.annotations.Level.Invocation; +import static org.openjdk.jmh.annotations.Scope.Thread; + +@State(Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkOmniBlock +{ + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("long", 
ImmutableList.of(BIGINT)) + .put("int", ImmutableList.of(INTEGER)).put("double", ImmutableList.of(DOUBLE)) + .put("varchar", ImmutableList.of(createVarcharType(50))).build(); + + private static final int TOTAL_PAGES = 100; + private static final int ROWS_PER_PAGE = 10000; + private static final int[] POSITIONS; + + static { + POSITIONS = new int[ROWS_PER_PAGE / 2]; + for (int i = 0; i < POSITIONS.length; i++) { + POSITIONS[i] = i * 2; + } + } + + @State(Thread) + public static class Context + { + @Param({"int", "long", "double", "varchar"}) + String dataType; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + private List pages; + + private List> newBlocks; + + @Setup(Invocation) + public void setup() + { + pages = generateTestData(); + newBlocks = new LinkedList<>(); + } + + @TearDown(Invocation) + public void cleanup() + { + for (Page page : pages) { + BlockUtils.freePage(page); + } + + for (Block block : newBlocks) { + block.close(); + } + } + + private List generateTestData() + { + List typesArray = INPUT_TYPES.get(dataType); + List pages = new ArrayList<>(); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(typesArray, ROWS_PER_PAGE)); + } + else { + pages.add(PageBuilderUtil.createSequencePage(typesArray, ROWS_PER_PAGE)); + } + } + return OperatorUtils.transferToOffHeapPages(VecAllocator.GLOBAL_VECTOR_ALLOCATOR, pages); + } + + public List getPages() + { + return pages; + } + } + + @Benchmark + public List> blockCopyPositions(Context context) + { + for (Page page : context.getPages()) { + int positionCount = page.getPositionCount(); + int channelCount = page.getChannelCount(); + for (int i = 0; i < channelCount; i++) { + Block block = page.getBlock(i).copyPositions(POSITIONS, 0, positionCount / 2); + context.newBlocks.add(block); + } + } + return context.newBlocks; + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkOmniBlock.class.getSimpleName() + ".*").build(); + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/AbstractOperatorBenchmarkContext.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/AbstractOperatorBenchmarkContext.java new file mode 100644 index 000000000..f1e89bacd --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/AbstractOperatorBenchmarkContext.java @@ -0,0 +1,294 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package nova.hetu.olk.operator.benchmark; + +import io.airlift.units.DataSize; +import io.prestosql.execution.TaskId; +import io.prestosql.execution.TaskStateMachine; +import io.prestosql.operator.DriverContext; +import io.prestosql.operator.Operator; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.operator.TaskContext; +import io.prestosql.spi.Page; +import io.prestosql.testing.TestingTaskContext; +import nova.hetu.olk.tool.BlockUtils; +import nova.hetu.olk.tool.VecAllocatorHelper; +import nova.hetu.omniruntime.vector.VecAllocator; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.TearDown; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.atomic.AtomicInteger; + +import static io.airlift.concurrent.Threads.daemonThreadsNamed; +import static io.airlift.units.DataSize.Unit.GIGABYTE; +import static io.prestosql.SessionTestUtils.TEST_SESSION; +import static java.util.concurrent.Executors.newCachedThreadPool; +import static java.util.concurrent.Executors.newScheduledThreadPool; +import static nova.hetu.olk.tool.OperatorUtils.transferToOffHeapPages; +import static org.openjdk.jmh.annotations.Level.Invocation; +import static org.openjdk.jmh.annotations.Level.Trial; + +public abstract class AbstractOperatorBenchmarkContext +{ + private static final boolean INCLUDE_CREATE_OPERATOR = true; + + private static final AtomicInteger queryIdGenerator = new AtomicInteger(); + protected ExecutorService executor; + + protected ScheduledExecutorService scheduledExecutor; + + protected OperatorFactory operatorFactory; + + private List pageTemplate; + + private InvocationContext invocationContext; + + public static class InvocationContext + { + private LinkedList pages; + private DriverContext driverContext; + private LinkedList output; + private Operator operator; + } + + public void setup() + { + this.setupTrial(); + this.setupIteration(); + } + + public void cleanup() + { + this.cleanupIteration(); + this.cleanupTrial(); + } + + protected void beforeSetupTrial() + { + } + + @Setup(Trial) + public void setupTrial() + { + beforeSetupTrial(); + executor = newCachedThreadPool(daemonThreadsNamed("test-executor-%s")); + scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed("test-scheduledExecutor-%s")); + operatorFactory = createOperatorFactory(); + pageTemplate = buildPages(); + afterSetupTrial(); + } + + protected void afterSetupTrial() + { + } + + protected void beforeSetupIteration() + { + } + + @Setup(Invocation) + public void setupIteration() + { + beforeSetupIteration(); + InvocationContext invocationContext = new InvocationContext(); + invocationContext.driverContext = createTaskContext() + .addPipelineContext(0, true, true, false) + .addDriverContext(); + invocationContext.pages = new LinkedList<>(forkPages(this.pageTemplate)); + if (!INCLUDE_CREATE_OPERATOR) { + invocationContext.operator = operatorFactory.createOperator(invocationContext.driverContext); + } + this.invocationContext = invocationContext; + afterSetupIteration(); + } + + protected void afterSetupIteration() + { + } + + protected abstract List buildPages(); + + protected abstract List forkPages(List pages); + + protected abstract OperatorFactory createOperatorFactory(); + + protected TaskContext createTaskContext() + { + return createTaskContextBySizeInGigaByte(2); + } + + protected 
TaskContext createTaskContextBySizeInGigaByte(int gigaByte) + { + return TestingTaskContext.builder(executor, scheduledExecutor, TEST_SESSION) + .setQueryMaxMemory(new DataSize(gigaByte, GIGABYTE)).setTaskStateMachine(new TaskStateMachine(new TaskId("query", 1, queryIdGenerator.incrementAndGet()), executor)).build(); + } + + protected void beforeCleanupIteration() + { + } + + @TearDown(Invocation) + public void cleanupIteration() + { + beforeCleanupIteration(); + closeOperator(); + if (invocationContext.output != null) { + for (Page page : invocationContext.output) { + BlockUtils.freePage(page); + } + } + if (getRemainInputPages() != null) { + for (Page page : getRemainInputPages()) { + BlockUtils.freePage(page); + } + } + invocationContext.driverContext.finished(); + invocationContext.driverContext.getPipelineContext().getTaskContext().getTaskStateMachine().finished(); + invocationContext = null; + afterCleanupIteration(); + } + + protected void afterCleanupIteration() + { + } + + protected void beforeCleanupTrial() + { + } + + @TearDown(Trial) + public void cleanupTrial() + { + beforeCleanupTrial(); + executor.shutdownNow(); + scheduledExecutor.shutdownNow(); + afterCleanupTrial(); + } + + protected void afterCleanupTrial() + { + } + + public final Operator createOperator() + { + if (INCLUDE_CREATE_OPERATOR) { + invocationContext.operator = operatorFactory.createOperator(invocationContext.driverContext); + } + return invocationContext.operator; + } + + public final void closeOperator() + { + try { + invocationContext.operator.close(); + } + catch (Exception e) { + e.printStackTrace(); + } + } + + public final List getPages() + { + return pageTemplate; + } + + public final LinkedList getRemainInputPages() + { + return invocationContext.pages; + } + + public List setOutput(LinkedList output) + { + this.invocationContext.output = output; + return output; + } + + public final List doDefaultBenchMark() + { + Operator operator = createOperator(); + Iterator input = getRemainInputPages().iterator(); + LinkedList outputPages = new LinkedList<>(); + + while (input.hasNext()) { + if (!operator.needsInput() && !operator.isFinished()) { + Page output = operator.getOutput(); + if (output != null) { + outputPages.add(output); + } + } + Page next = input.next(); + if (operator.needsInput()) { + operator.addInput(next); + input.remove(); + } + } + + operator.finish(); + + do { + Page outputPage = operator.getOutput(); + if (outputPage != null) { + outputPages.add(outputPage); + } + } while (!operator.isFinished()); + + return setOutput(outputPages); + } + + abstract static class AbstractOmniOperatorBenchmarkContext + extends AbstractOperatorBenchmarkContext + { + private VecAllocator taskLevelAllocator; + + @Override + protected TaskContext createTaskContext() + { + TaskContext taskContext = super.createTaskContext(); + taskLevelAllocator = VecAllocatorHelper.createTaskLevelAllocator(taskContext); + return taskContext; + } + + @Override + protected List forkPages(List pages) + { + List slicedPages = new ArrayList<>(pages.size()); + for (Page page : pages) { + slicedPages.add(page.getRegion(0, page.getPositionCount())); + } + return transferToOffHeapPages(taskLevelAllocator, slicedPages); + } + } + + abstract static class AbstractOlkOperatorBenchmarkContext + extends AbstractOperatorBenchmarkContext + { + @Override + protected List forkPages(List pages) + { + List slicedPages = new ArrayList<>(pages.size()); + for (Page page : pages) { + slicedPages.add(page.getRegion(0, page.getPositionCount())); + } + 
return slicedPages; + } + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkBuildOffHeapOmniOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkBuildOffHeapOmniOperator.java new file mode 100644 index 000000000..175ec3a36 --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkBuildOffHeapOmniOperator.java @@ -0,0 +1,128 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.spi.Page; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.Type; +import nova.hetu.olk.operator.BuildOffHeapOmniOperator; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOmniOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.BooleanType.BOOLEAN; +import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkBuildOffHeapOmniOperator +{ + public static final int TOTAL_PAGES = 100; + public static final int ROWS_PER_PAGE = 10000; + + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("boolean", ImmutableList.of(BOOLEAN)) + .put("long", ImmutableList.of(BIGINT)).put("int", ImmutableList.of(INTEGER)) + .put("double", ImmutableList.of(DOUBLE)).put("varchar", ImmutableList.of(createVarcharType(50))) + .build(); + + @State(Scope.Thread) + public static class BenchmarkContext + extends AbstractOmniOperatorBenchmarkContext + { + 
@Param({"int", "long", "double", "varchar"}) + String testGroup = "int"; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + @Override + protected List buildPages() + { + List typesArray = INPUT_TYPES.get(testGroup); + List pages = new ArrayList<>(); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(typesArray, ROWS_PER_PAGE)); + } + else { + pages.add(PageBuilderUtil.createSequencePage(typesArray, ROWS_PER_PAGE)); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + return new BuildOffHeapOmniOperator.BuildOffHeapOmniOperatorFactory(0, new PlanNodeId("test"), + INPUT_TYPES.get(testGroup)); + } + + @Override + protected List forkPages(List pages) + { + List slicedPages = new ArrayList<>(pages.size()); + for (Page page : pages) { + slicedPages.add(page.getRegion(0, page.getPositionCount())); + } + return slicedPages; + } + } + + @Benchmark + public List buildOffHeap(BenchmarkContext context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkBuildOffHeapOmniOperator.class.getSimpleName() + ".*").build(); + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkBuildOnHeapOmniOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkBuildOnHeapOmniOperator.java new file mode 100644 index 000000000..42fba13ac --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkBuildOnHeapOmniOperator.java @@ -0,0 +1,115 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.spi.Page; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.Type; +import nova.hetu.olk.operator.BuildOnHeapOmniOperator; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOmniOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.BooleanType.BOOLEAN; +import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkBuildOnHeapOmniOperator +{ + public static final int TOTAL_PAGES = 100; + public static final int ROWS_PER_PAGE = 10000; + + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("boolean", ImmutableList.of(BOOLEAN)) + .put("long", ImmutableList.of(BIGINT)).put("int", ImmutableList.of(INTEGER)) + .put("double", ImmutableList.of(DOUBLE)).put("varchar", ImmutableList.of(createVarcharType(50))) + .build(); + + @State(Scope.Thread) + public static class BenchmarkContext + extends AbstractOmniOperatorBenchmarkContext + { + @Param({"int", "long", "double", "varchar"}) + String testGroup = "int"; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + @Override + protected List buildPages() + { + List pages = new ArrayList<>(); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(INPUT_TYPES.get(testGroup), ROWS_PER_PAGE)); + } + else { + pages.add(PageBuilderUtil.createSequencePage(INPUT_TYPES.get(testGroup), ROWS_PER_PAGE)); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + return new BuildOnHeapOmniOperator.BuildOnHeapOmniOperatorFactory(0, new PlanNodeId("test")); + } + } + + @Benchmark + public List buildOnHeap(BenchmarkContext context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkBuildOnHeapOmniOperator.class.getSimpleName() + ".*").build(); + new Runner(options).run(); + } +} diff --git 
a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkDistinctLimitOlkOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkDistinctLimitOlkOperator.java new file mode 100644 index 000000000..e80013bd1 --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkDistinctLimitOlkOperator.java @@ -0,0 +1,140 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.operator.DistinctLimitOperator; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.spi.Page; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.Type; +import io.prestosql.sql.gen.JoinCompiler; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOlkOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.DecimalType.createDecimalType; +import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkDistinctLimitOlkOperator +{ + private static final int TOTAL_PAGES = 1000; + + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("group1", ImmutableList.of(INTEGER)) + .put("group2", ImmutableList.of(createVarcharType(16))).put("group3", ImmutableList.of(DOUBLE)) + .put("group4", ImmutableList.of(createDecimalType())) + .put("group5", ImmutableList.of(INTEGER, createVarcharType(16))) + .put("group6", 
ImmutableList.of(INTEGER, BIGINT, createDecimalType(), DOUBLE)) + .put("group7", ImmutableList.of(createVarcharType(20), createVarcharType(30), createVarcharType(50))) + .put("group8", ImmutableList.of(INTEGER, createVarcharType(30), BIGINT, createDecimalType(), + createVarcharType(50))) + .build(); + + private static final Map> DISTINCT_CHANNELS = ImmutableMap + .>builder().put("group1", ImmutableList.of(0)).put("group2", ImmutableList.of(0)) + .put("group3", ImmutableList.of(0)).put("group4", ImmutableList.of(0)) + .put("group5", ImmutableList.of(0, 1)).put("group6", ImmutableList.of(0, 1, 2, 3)) + .put("group7", ImmutableList.of(0, 1, 2)).put("group8", ImmutableList.of(0, 1, 2, 3, 4)).build(); + + @State(Scope.Thread) + public static class BenchmarkContext + extends AbstractOlkOperatorBenchmarkContext + { + @Param({"1", "100", "10000", "100000"}) + private String limit = "100"; + + @Param({"group1", "group2", "group3", "group4", "group5", "group6", "group7", "group8"}) + String testGroup = "group1"; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + @Param({"32", "1024"}) + public String rowsPerPageStr = "1024"; + + @Override + protected List buildPages() + { + List typesArray = INPUT_TYPES.get(testGroup); + List pages = new ArrayList<>(TOTAL_PAGES); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(typesArray, + Integer.parseInt(rowsPerPageStr))); + } + else { + pages.add(PageBuilderUtil.createSequencePage(typesArray, Integer.parseInt(rowsPerPageStr))); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + return new DistinctLimitOperator.DistinctLimitOperatorFactory(0, new PlanNodeId("test"), + INPUT_TYPES.get(testGroup), DISTINCT_CHANNELS.get(testGroup), Long.parseLong(limit), Optional.empty(), + new JoinCompiler(createTestMetadataManager())); + } + } + + @Benchmark + public List distinctLimit(BenchmarkContext context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkDistinctLimitOlkOperator.class.getSimpleName() + ".*").build(); + + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkDistinctLimitOmniOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkDistinctLimitOmniOperator.java new file mode 100644 index 000000000..24b92baf7 --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkDistinctLimitOmniOperator.java @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.spi.Page; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.Type; +import nova.hetu.olk.operator.DistinctLimitOmniOperator; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOmniOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.DecimalType.createDecimalType; +import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkDistinctLimitOmniOperator +{ + private static final int TOTAL_PAGES = 1000; + + private static final Map> INPUT_TYPES = ImmutableMap + .>builder() + .put("group1", ImmutableList.of(INTEGER)) + .put("group2", ImmutableList.of(createVarcharType(16))) + .put("group3", ImmutableList.of(DOUBLE)) + .put("group4", ImmutableList.of(createDecimalType())) + .put("group5", ImmutableList.of(INTEGER, createVarcharType(16))) + .put("group6", ImmutableList.of(INTEGER, BIGINT, createDecimalType(), DOUBLE)) + .put("group7", ImmutableList.of(createVarcharType(20), createVarcharType(30), createVarcharType(50))) + .put("group8", ImmutableList.of(INTEGER, createVarcharType(30), BIGINT, createDecimalType(), + createVarcharType(50))) + .build(); + + private static final Map> DISTINCT_CHANNELS = ImmutableMap + .>builder().put("group1", ImmutableList.of(0)).put("group2", ImmutableList.of(0)) + .put("group3", ImmutableList.of(0)).put("group4", ImmutableList.of(0)) + .put("group5", ImmutableList.of(0, 1)).put("group6", ImmutableList.of(0, 1, 2, 3)) + .put("group7", ImmutableList.of(0, 1, 2)).put("group8", ImmutableList.of(0, 1, 2, 3, 4)).build(); + + @State(Scope.Thread) + public static class BenchmarkContext + extends AbstractOmniOperatorBenchmarkContext + { + @Param({"1", "100", "10000", "100000"}) + private String limit = "100"; + + @Param({"group1", "group2", "group3", "group4", "group5", "group6", "group7", "group8"}) + String testGroup = "group1"; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + @Param({"32", "1024"}) + public String rowsPerPageStr = "1024"; + + @Override + protected List buildPages() + { + List typesArray = 
INPUT_TYPES.get(testGroup); + List pages = new ArrayList<>(TOTAL_PAGES); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(typesArray, + Integer.parseInt(rowsPerPageStr))); + } + else { + pages.add(PageBuilderUtil.createSequencePage(typesArray, Integer.parseInt(rowsPerPageStr))); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + List inputTypes = INPUT_TYPES.get(testGroup); + List distinctChannels = DISTINCT_CHANNELS.get(testGroup); + + return new DistinctLimitOmniOperator.DistinctLimitOmniOperatorFactory(0, new PlanNodeId("test"), + inputTypes, distinctChannels, Optional.empty(), Integer.parseInt(limit)); + } + } + + @Benchmark + public List distinctLimit(BenchmarkContext context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkDistinctLimitOmniOperator.class.getSimpleName() + ".*").build(); + + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkFilterAndProjectOlkOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkFilterAndProjectOlkOperator.java new file mode 100644 index 000000000..7c6f09f0d --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkFilterAndProjectOlkOperator.java @@ -0,0 +1,300 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.airlift.units.DataSize; +import io.prestosql.Session; +import io.prestosql.metadata.Metadata; +import io.prestosql.operator.FilterAndProjectOperator.FilterAndProjectOperatorFactory; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.operator.project.PageProcessor; +import io.prestosql.spi.Page; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.plan.Symbol; +import io.prestosql.spi.relation.RowExpression; +import io.prestosql.spi.type.CharType; +import io.prestosql.spi.type.Type; +import io.prestosql.spi.type.VarcharType; +import io.prestosql.sql.gen.ExpressionCompiler; +import io.prestosql.sql.gen.PageFunctionCompiler; +import io.prestosql.sql.parser.SqlParser; +import io.prestosql.sql.planner.TypeAnalyzer; +import io.prestosql.sql.planner.TypeProvider; +import io.prestosql.sql.relational.SqlToRowExpressionTranslator; +import io.prestosql.sql.tree.Expression; +import io.prestosql.testing.TestingSession; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOlkOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.concurrent.TimeUnit; + +import static io.airlift.units.DataSize.Unit.KILOBYTE; +import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; +import static io.prestosql.operator.scalar.FunctionAssertions.createExpression; +import static io.prestosql.spi.function.FunctionKind.SCALAR; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.DateType.DATE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static java.util.Locale.ENGLISH; +import static org.openjdk.jmh.annotations.Scope.Thread; + +@SuppressWarnings({"PackageVisibleField", "FieldCanBeLocal"}) +@State(Scope.Thread) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Fork(0) +@Threads(1) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +@BenchmarkMode(Mode.AverageTime) +public class BenchmarkFilterAndProjectOlkOperator +{ + private static final VarcharType VARCHAR = VarcharType.createVarcharType(200); + private static final CharType CHAR = CharType.createCharType(200); + private static final Session TEST_SESSION = TestingSession.testSessionBuilder().build(); + private static final Metadata METADATA = createTestMetadataManager(); + private static final TypeAnalyzer TYPE_ANALYZER = new TypeAnalyzer(new SqlParser(), METADATA); + + private static final int TOTAL_POSITIONS = 1_000_000; + private static final DataSize 
FILTER_AND_PROJECT_MIN_OUTPUT_PAGE_SIZE = new DataSize(500, KILOBYTE); + private static final int FILTER_AND_PROJECT_MIN_OUTPUT_PAGE_ROW_COUNT = 256; + + static PageFunctionCompiler pageFunctionCompiler = new PageFunctionCompiler(METADATA, 0); + static ExpressionCompiler expressionCompiler = new ExpressionCompiler(METADATA, pageFunctionCompiler); + + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("q1", ImmutableList.of(INTEGER, INTEGER, INTEGER, DATE)) + .put("q2", ImmutableList.of(BIGINT, BIGINT, INTEGER, INTEGER)) + .put("q3", ImmutableList.of(VARCHAR, VARCHAR, INTEGER)) + .put("q4", ImmutableList.of(VARCHAR, INTEGER, INTEGER)) + .put("q5", ImmutableList.of(CHAR, INTEGER, INTEGER)) + .put("q6", ImmutableList.of(VARCHAR, BIGINT, INTEGER)).put("q7", ImmutableList.of(VARCHAR, INTEGER)) + .put("q8", ImmutableList.of(VARCHAR, VARCHAR, BIGINT, INTEGER)) + .put("q9", ImmutableList.of(BIGINT, INTEGER, INTEGER, VARCHAR)) + .put("q10", ImmutableList.of(BIGINT, INTEGER, INTEGER, VARCHAR)).build(); + + static Map> symbolTypes = new HashMap<>(); + + static Map> sourceLayout = new HashMap<>(); + + static { + for (Entry> entry : INPUT_TYPES.entrySet()) { + List types = entry.getValue(); + Map symbolTypes = new HashMap<>(); + Map sourceLayout = new HashMap<>(); + for (int i = 0; i < types.size(); i++) { + Symbol symbol = new Symbol(types.get(i).getTypeSignature().getBase().toLowerCase(ENGLISH) + i); + symbolTypes.put(symbol, types.get(i)); + sourceLayout.put(symbol, i); + } + BenchmarkFilterAndProjectOlkOperator.symbolTypes.put(entry.getKey(), symbolTypes); + BenchmarkFilterAndProjectOlkOperator.sourceLayout.put(entry.getKey(), sourceLayout); + } + } + + @State(Thread) + public static class Context + extends AbstractOlkOperatorBenchmarkContext + { + @Param({"q1", "q2", "q3", "q4", "q5", "q6", "q7", "q8", "q9", "q10"}) + String query; + + @Param({"32", "1024"}) + int positionsPerPage = 32; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + @Override + protected List buildPages() + { + List types = INPUT_TYPES.get(this.query); + List pages = new ArrayList<>(); + for (int i = 0; i < TOTAL_POSITIONS / positionsPerPage; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(types, positionsPerPage)); + } + else { + pages.add(PageBuilderUtil.createSequencePage(types, positionsPerPage)); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + List types = INPUT_TYPES.get(this.query); + List projections = getProjections(); + + PageProcessor pageProcessor = expressionCompiler + .compilePageProcessor(Optional.of(getFilter()), projections).get(); + + return new FilterAndProjectOperatorFactory(0, new PlanNodeId("test"), + () -> pageProcessor, types, FILTER_AND_PROJECT_MIN_OUTPUT_PAGE_SIZE, FILTER_AND_PROJECT_MIN_OUTPUT_PAGE_ROW_COUNT); + } + + private RowExpression getFilter() + { + switch (query) { + case "q1": + return rowExpression("integer0 in (1,2) or (integer1 between 1 and 10) or integer2 in (0,1,2,3)"); + case "q2": + return rowExpression("bigint0 > 0 or integer2 = 10 or (integer3 in (1,2) or integer3 = 3)"); + case "q3": + return rowExpression( + "varchar0 in ('1','2','3','4','5','6','7','8','9','10') or varchar1 in ('1','2') or integer2 in (1,2,3,4,5)"); + case "q4": + return rowExpression("varchar0 in ('1','2','3') or integer1 = 3 or integer2 = 3"); + case "q5": + return rowExpression("char0 = '3' or integer1 >= 10 or integer2 <= 20"); + case "q6": + return rowExpression( + 
"varchar0 in ('1','2','3','4','5','6','7','8','9','10') or (bigint1 between 1 and 10) or integer2 in (1,2,3,4,5)"); + case "q7": + return rowExpression("integer1 between 3 and 5"); + case "q8": + return rowExpression( + "varchar0 in ('1','2','3','4','5','6','7','8','9','10') or bigint2 between 3 and 5 or integer3 = 3"); + case "q9": + return rowExpression("bigint0 between 3 and 5 or integer1 = 3 or integer2 in (1,2)"); + case "q10": + return rowExpression("bigint0 between 3 and 5 or integer1 = 3 or integer2 = 3"); + default: + throw new IllegalArgumentException("Unsupported query!"); + } + } + + private List getProjections() + { + ImmutableList.Builder builder = ImmutableList.builder(); + + switch (query) { + case "q1": { + builder.add(rowExpression("integer0")); + builder.add(rowExpression("integer1")); + builder.add(rowExpression("integer2")); + builder.add(rowExpression("date3")); + break; + } + case "q2": { + builder.add(rowExpression("bigint0 - 1")); + builder.add(rowExpression("bigint1 + 1")); + builder.add(rowExpression("integer2")); + builder.add(rowExpression("cast(integer3 as BIGINT)")); + break; + } + case "q3": { + builder.add(rowExpression("varchar0")); + builder.add(rowExpression("varchar1")); + builder.add(rowExpression("cast(integer2 as BIGINT)")); + break; + } + case "q4": { + builder.add(rowExpression("varchar0")); + builder.add(rowExpression("integer1")); + builder.add(rowExpression("integer2")); + break; + } + case "q5": { + builder.add(rowExpression("concat(concat('foo', char0), 'lish')")); + builder.add(rowExpression("integer1")); + builder.add(rowExpression("integer2")); + break; + } + case "q6": { + builder.add(rowExpression("varchar0")); + builder.add(rowExpression("bigint1")); + builder.add(rowExpression("cast(integer2 as BIGINT)")); + break; + } + case "q7": { + builder.add(rowExpression("substr(varchar0, 1, 1)")); + builder.add(rowExpression("integer1")); + break; + } + case "q8": { + builder.add(rowExpression("varchar0")); + builder.add(rowExpression("varchar1")); + builder.add(rowExpression("bigint2")); + builder.add(rowExpression("cast(integer3 as BIGINT)")); + break; + } + case "q9": { + builder.add(rowExpression("bigint0")); + builder.add(rowExpression("cast(integer1 as BIGINT)")); + builder.add(rowExpression("cast(integer2 as BIGINT)")); + builder.add(rowExpression("substr(varchar3, 1, 1)")); + break; + } + case "q10": { + builder.add(rowExpression("bigint0")); + builder.add(rowExpression("cast(integer1 as BIGINT)")); + builder.add(rowExpression("integer2")); + builder.add(rowExpression("substr(varchar3, 1, 1)")); + break; + } + default: + break; + } + return builder.build(); + } + + private RowExpression rowExpression(String value) + { + Expression expression = createExpression(value, METADATA, TypeProvider.copyOf(symbolTypes.get(query))); + return SqlToRowExpressionTranslator.translate(expression, SCALAR, + TYPE_ANALYZER.getTypes(TEST_SESSION, TypeProvider.copyOf(symbolTypes.get(query)), expression), sourceLayout.get(query), + METADATA.getFunctionAndTypeManager(), TEST_SESSION, true); + } + } + + @Benchmark + public List benchmark(Context context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL).threads(1) + .include(".*" + BenchmarkFilterAndProjectOlkOperator.class.getSimpleName() + ".*").build(); + + new Runner(options).run(); + } +} diff --git 
a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkFilterAndProjectOmniOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkFilterAndProjectOmniOperator.java new file mode 100644 index 000000000..b1d917115 --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkFilterAndProjectOmniOperator.java @@ -0,0 +1,304 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.airlift.units.DataSize; +import io.prestosql.Session; +import io.prestosql.execution.TaskId; +import io.prestosql.metadata.Metadata; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.operator.project.PageProcessor; +import io.prestosql.spi.Page; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.plan.Symbol; +import io.prestosql.spi.relation.RowExpression; +import io.prestosql.spi.type.CharType; +import io.prestosql.spi.type.Type; +import io.prestosql.spi.type.VarcharType; +import io.prestosql.sql.gen.PageFunctionCompiler; +import io.prestosql.sql.parser.SqlParser; +import io.prestosql.sql.planner.TypeAnalyzer; +import io.prestosql.sql.planner.TypeProvider; +import io.prestosql.sql.relational.SqlToRowExpressionTranslator; +import io.prestosql.sql.tree.Expression; +import io.prestosql.testing.TestingSession; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOmniOperatorBenchmarkContext; +import nova.hetu.olk.operator.filterandproject.FilterAndProjectOmniOperator; +import nova.hetu.olk.operator.filterandproject.OmniExpressionCompiler; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.OptionalInt; +import java.util.concurrent.TimeUnit; + +import static io.airlift.units.DataSize.Unit.KILOBYTE; +import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; +import static 
io.prestosql.operator.scalar.FunctionAssertions.createExpression; +import static io.prestosql.spi.function.FunctionKind.SCALAR; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.DateType.DATE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static java.util.Locale.ENGLISH; +import static org.openjdk.jmh.annotations.Scope.Thread; + +@SuppressWarnings({"PackageVisibleField", "FieldCanBeLocal"}) +@State(Scope.Thread) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Fork(0) +@Threads(1) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +@BenchmarkMode(Mode.AverageTime) +public class BenchmarkFilterAndProjectOmniOperator +{ + private static final VarcharType VARCHAR = VarcharType.createVarcharType(200); + private static final CharType CHAR = CharType.createCharType(200); + private static final Session TEST_SESSION = TestingSession.testSessionBuilder().build(); + private static final Metadata METADATA = createTestMetadataManager(); + private static final TypeAnalyzer TYPE_ANALYZER = new TypeAnalyzer(new SqlParser(), METADATA); + + private static final int TOTAL_POSITIONS = 1_000_000; + private static final DataSize FILTER_AND_PROJECT_MIN_OUTPUT_PAGE_SIZE = new DataSize(500, KILOBYTE); + private static final int FILTER_AND_PROJECT_MIN_OUTPUT_PAGE_ROW_COUNT = 256; + + static PageFunctionCompiler pageFunctionCompiler = new PageFunctionCompiler(METADATA, 0); + static OmniExpressionCompiler omniExpressionCompiler = new OmniExpressionCompiler(METADATA, pageFunctionCompiler); + + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("q1", ImmutableList.of(INTEGER, INTEGER, INTEGER, DATE)) + .put("q2", ImmutableList.of(BIGINT, BIGINT, INTEGER, INTEGER)) + .put("q3", ImmutableList.of(VARCHAR, VARCHAR, INTEGER)) + .put("q4", ImmutableList.of(VARCHAR, INTEGER, INTEGER)) + .put("q5", ImmutableList.of(CHAR, INTEGER, INTEGER)) + .put("q6", ImmutableList.of(VARCHAR, BIGINT, INTEGER)).put("q7", ImmutableList.of(VARCHAR, INTEGER)) + .put("q8", ImmutableList.of(VARCHAR, VARCHAR, BIGINT, INTEGER)) + .put("q9", ImmutableList.of(BIGINT, INTEGER, INTEGER, VARCHAR)) + .put("q10", ImmutableList.of(BIGINT, INTEGER, INTEGER, VARCHAR)).build(); + + static Map> symbolTypes = new HashMap<>(); + + static Map> sourceLayout = new HashMap<>(); + + static { + for (Entry> entry : INPUT_TYPES.entrySet()) { + List types = entry.getValue(); + Map symbolTypes = new HashMap<>(); + Map sourceLayout = new HashMap<>(); + for (int i = 0; i < types.size(); i++) { + Symbol symbol = new Symbol(types.get(i).getTypeSignature().getBase().toLowerCase(ENGLISH) + i); + symbolTypes.put(symbol, types.get(i)); + sourceLayout.put(symbol, i); + } + BenchmarkFilterAndProjectOmniOperator.symbolTypes.put(entry.getKey(), symbolTypes); + BenchmarkFilterAndProjectOmniOperator.sourceLayout.put(entry.getKey(), sourceLayout); + } + } + + @State(Thread) + public static class Context + extends AbstractOmniOperatorBenchmarkContext + { + @Param({"q1", "q2", "q3", "q4", "q5", "q6", "q7", "q8", "q9", "q10"}) + String query; + + @Param({"32", "1024"}) + int positionsPerPage = 32; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + @Override + protected List buildPages() + { + List types = INPUT_TYPES.get(this.query); + List pages = new ArrayList<>(); + for (int i = 0; i < TOTAL_POSITIONS / positionsPerPage; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(types, positionsPerPage)); + } + else { + 
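+ // flat (non-dictionary) sequence pages built from the query's input types and positionsPerPage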
pages.add(PageBuilderUtil.createSequencePage(types, positionsPerPage)); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + List types = INPUT_TYPES.get(this.query); + + List projections = getProjections(); + PageProcessor pageProcessor = omniExpressionCompiler + .compilePageProcessor(Optional.of(getFilter()), projections, Optional.empty(), OptionalInt.empty(), + types, new TaskId("test")).get(); + return new FilterAndProjectOmniOperator.FilterAndProjectOmniOperatorFactory(0, + new PlanNodeId("test"), () -> pageProcessor, types, FILTER_AND_PROJECT_MIN_OUTPUT_PAGE_SIZE, + FILTER_AND_PROJECT_MIN_OUTPUT_PAGE_ROW_COUNT, types); + } + + private RowExpression getFilter() + { + switch (query) { + case "q1": + return rowExpression("integer0 in (1,2) or (integer1 between 1 and 10) or integer2 in (0,1,2,3)"); + case "q2": + return rowExpression("bigint0 > 0 or integer2 = 10 or (integer3 in (1,2) or integer3 = 3)"); + case "q3": + return rowExpression( + "varchar0 in ('1','2','3','4','5','6','7','8','9','10') or varchar1 in ('1','2') or integer2 in (1,2,3,4,5)"); + case "q4": + return rowExpression("varchar0 in ('1','2','3') or integer1 = 3 or integer2 = 3"); + case "q5": + return rowExpression("char0 = '3' or integer1 >= 10 or integer2 <= 20"); + case "q6": + return rowExpression( + "varchar0 in ('1','2','3','4','5','6','7','8','9','10') or (bigint1 between 1 and 10) or integer2 in (1,2,3,4,5)"); + case "q7": + return rowExpression("integer1 between 3 and 5"); + case "q8": + return rowExpression( + "varchar0 in ('1','2','3','4','5','6','7','8','9','10') or bigint2 between 3 and 5 or integer3 = 3"); + case "q9": + return rowExpression("bigint0 between 3 and 5 or integer1 = 3 or integer2 in (1,2)"); + case "q10": + return rowExpression("bigint0 between 3 and 5 or integer1 = 3 or integer2 = 3"); + default: + throw new IllegalArgumentException("Unsupported query!"); + } + } + + private List getProjections() + { + ImmutableList.Builder builder = ImmutableList.builder(); + + switch (query) { + case "q1": { + builder.add(rowExpression("integer0")); + builder.add(rowExpression("integer1")); + builder.add(rowExpression("integer2")); + builder.add(rowExpression("date3")); + break; + } + case "q2": { + builder.add(rowExpression("bigint0 - 1")); + builder.add(rowExpression("bigint1 + 1")); + builder.add(rowExpression("integer2")); + builder.add(rowExpression("cast(integer3 as BIGINT)")); + break; + } + case "q3": { + builder.add(rowExpression("varchar0")); + builder.add(rowExpression("varchar1")); + builder.add(rowExpression("cast(integer2 as BIGINT)")); + break; + } + case "q4": { + builder.add(rowExpression("varchar0")); + builder.add(rowExpression("integer1")); + builder.add(rowExpression("integer2")); + break; + } + case "q5": { + builder.add(rowExpression("concat(concat('foo', char0), 'lish')")); + builder.add(rowExpression("integer1")); + builder.add(rowExpression("integer2")); + break; + } + case "q6": { + builder.add(rowExpression("varchar0")); + builder.add(rowExpression("bigint1")); + builder.add(rowExpression("cast(integer2 as BIGINT)")); + break; + } + case "q7": { + builder.add(rowExpression("substr(varchar0, 1, 1)")); + builder.add(rowExpression("integer1")); + break; + } + case "q8": { + builder.add(rowExpression("varchar0")); + builder.add(rowExpression("varchar1")); + builder.add(rowExpression("bigint2")); + builder.add(rowExpression("cast(integer3 as BIGINT)")); + break; + } + case "q9": { + builder.add(rowExpression("bigint0")); + 
builder.add(rowExpression("cast(integer1 as BIGINT)")); + builder.add(rowExpression("cast(integer2 as BIGINT)")); + builder.add(rowExpression("substr(varchar3, 1, 1)")); + break; + } + case "q10": { + builder.add(rowExpression("bigint0")); + builder.add(rowExpression("cast(integer1 as BIGINT)")); + builder.add(rowExpression("integer2")); + builder.add(rowExpression("substr(varchar3, 1, 1)")); + break; + } + default: + break; + } + return builder.build(); + } + + private RowExpression rowExpression(String value) + { + Expression expression = createExpression(value, METADATA, TypeProvider.copyOf(symbolTypes.get(query))); + + return SqlToRowExpressionTranslator.translate(expression, SCALAR, + TYPE_ANALYZER.getTypes(TEST_SESSION, TypeProvider.copyOf(symbolTypes.get(query)), expression), sourceLayout.get(query), + METADATA.getFunctionAndTypeManager(), TEST_SESSION, true); + } + } + + @Benchmark + public List benchmark(Context context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkFilterAndProjectOmniOperator.class.getSimpleName() + ".*").build(); + + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashAggregationOlkOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashAggregationOlkOperator.java new file mode 100644 index 000000000..8b82154cb --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashAggregationOlkOperator.java @@ -0,0 +1,355 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.airlift.units.DataSize; +import io.prestosql.RowPagesBuilder; +import io.prestosql.metadata.Metadata; +import io.prestosql.operator.HashAggregationOperator.HashAggregationOperatorFactory; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.operator.StreamingAggregationOperator.StreamingAggregationOperatorFactory; +import io.prestosql.operator.aggregation.InternalAggregationFunction; +import io.prestosql.spi.Page; +import io.prestosql.spi.block.Block; +import io.prestosql.spi.block.BlockBuilder; +import io.prestosql.spi.block.DictionaryBlock; +import io.prestosql.spi.connector.QualifiedObjectName; +import io.prestosql.spi.function.Signature; +import io.prestosql.spi.plan.AggregationNode; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.Type; +import io.prestosql.spi.type.VarcharType; +import io.prestosql.spiller.SpillerFactory; +import io.prestosql.sql.gen.JoinCompiler; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOlkOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; + +import static io.airlift.units.DataSize.Unit.MEGABYTE; +import static io.airlift.units.DataSize.succinctBytes; +import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; +import static io.prestosql.spi.function.FunctionKind.AGGREGATE; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.VARCHAR; +import static java.lang.String.format; +import static org.openjdk.jmh.annotations.Scope.Thread; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkHashAggregationOlkOperator +{ + private static final Metadata metadata = createTestMetadataManager(); + private static final VarcharType FIXED_WIDTH_VARCHAR = VarcharType.createVarcharType(200); + private static final InternalAggregationFunction LONG_SUM = metadata.getFunctionAndTypeManager() + .getAggregateFunctionImplementation(new Signature(QualifiedObjectName.valueOfDefaultFunction("sum"), + AGGREGATE, BIGINT.getTypeSignature(), BIGINT.getTypeSignature())); + private static final InternalAggregationFunction COUNT = metadata.getFunctionAndTypeManager() + .getAggregateFunctionImplementation(new Signature(QualifiedObjectName.valueOfDefaultFunction("count"), + AGGREGATE, BIGINT.getTypeSignature())); + + private static 
final Map> allTypes = new ImmutableMap.Builder>() + .put("sql2", + ImmutableList.of(FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, + INTEGER, INTEGER, BIGINT)) + .put("sql4", ImmutableList.of(FIXED_WIDTH_VARCHAR, INTEGER, INTEGER, INTEGER, BIGINT)) + .put("sql6", ImmutableList.of(INTEGER, INTEGER, BIGINT)) + .put("sql7", + ImmutableList.of(FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, BIGINT, BIGINT, + BIGINT, BIGINT, BIGINT)) + .put("sql9", ImmutableList.of(BIGINT, BIGINT, BIGINT, FIXED_WIDTH_VARCHAR, BIGINT, BIGINT)).build(); + private static final Map> channels = new ImmutableMap.Builder>() + .put("sql2", ImmutableList.of(0, 1, 2, 3, 4, 5, 6)).put("sql4", ImmutableList.of(0, 1, 2, 3, 4)) + .put("sql6", ImmutableList.of(0, 1, 2)).put("sql7", ImmutableList.of(0, 1, 2, 3, 4, 5, 6, 7)) + .put("sql9", ImmutableList.of(0, 1, 2, 3, 4, 5)).build(); + private static final Map> hashChannels = new ImmutableMap.Builder>() + .put("sql2", ImmutableList.of(0, 1, 2, 3, 4, 5)).put("sql4", ImmutableList.of(0, 1, 2, 3)) + .put("sql6", ImmutableList.of(0, 1)).put("sql7", ImmutableList.of(0, 1, 2)) + .put("sql9", ImmutableList.of(0, 1, 2, 3)).build(); + private static final Map> hashTypes = new ImmutableMap.Builder>() + .put("sql2", + ImmutableList.of(FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, + INTEGER, INTEGER)) + .put("sql4", ImmutableList.of(FIXED_WIDTH_VARCHAR, INTEGER, INTEGER, INTEGER)) + .put("sql6", ImmutableList.of(INTEGER, INTEGER)) + .put("sql7", ImmutableList.of(FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR)) + .put("sql9", ImmutableList.of(BIGINT, BIGINT, BIGINT, FIXED_WIDTH_VARCHAR)).build(); + private static final Map> aggChannels = new ImmutableMap.Builder>() + .put("sql2", ImmutableList.of(6)).put("sql4", ImmutableList.of(4)).put("sql6", ImmutableList.of(2)) + .put("sql7", ImmutableList.of(3, 4, 5, 6, 7)).put("sql9", ImmutableList.of(4, 5)).build(); + private static final Map> aggInputTypes = new ImmutableMap.Builder>() + .put("sql2", ImmutableList.of(BIGINT)).put("sql4", ImmutableList.of(BIGINT)) + .put("sql6", ImmutableList.of(BIGINT)).put("sql7", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT, BIGINT)) + .put("sql9", ImmutableList.of(BIGINT, BIGINT)).build(); + private static final Map> aggOutputTypes = new ImmutableMap.Builder>() + .put("sql2", ImmutableList.of(BIGINT)).put("sql4", ImmutableList.of(BIGINT)) + .put("sql6", ImmutableList.of(BIGINT)).put("sql7", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT, BIGINT)) + .put("sql9", ImmutableList.of(BIGINT, BIGINT)).build(); + private static final Map> aggFuncTypes = new ImmutableMap.Builder>() + .put("sql2", ImmutableList.of("sum")).put("sql4", ImmutableList.of("sum")) + .put("sql6", ImmutableList.of("sum")).put("sql7", ImmutableList.of("sum", "sum", "sum", "sum", "sum")) + .put("sql9", ImmutableList.of("sum", "sum")).build(); + + public static final int TOTAL_PAGES = 140; + public static final int ROWS_PER_PAGE = 10_000; + public static final String PREFIX_BASE = "A"; + + @State(Thread) + public static class Context + extends AbstractOlkOperatorBenchmarkContext + { + @Param({"100", "1000", "10000"}) + public int rowsPerGroup = 100; + + @Param("hash") + public String operatorType; + + @Param({"false", "true"}) + public boolean isDictionary; + + @Param({"0", "50", "150"}) + public int prefixLength; + + @Param({"sql2", "sql4", "sql6", "sql7", "sql9"}) + public static String sqlId = "sql2"; + + private String genVarcharPrefix() + 
{ + StringBuilder stringBuilder = new StringBuilder(); + for (int i = 0; i < prefixLength; ++i) { + stringBuilder.append(PREFIX_BASE); + } + return stringBuilder.toString(); + } + + @Override + protected List buildPages() + { + List types = allTypes.get(sqlId); + + int groupsPerPage = ROWS_PER_PAGE / rowsPerGroup; + boolean hashAggregation = operatorType.equalsIgnoreCase("hash"); + RowPagesBuilder pagesBuilder = RowPagesBuilder.rowPagesBuilder(hashAggregation, hashChannels.get(sqlId), + hashTypes.get(sqlId)); + + for (int i = 0; i < TOTAL_PAGES; i++) { + List> allBlocks = new ArrayList<>(); + for (Type type : types) { + switch (type.getTypeSignature().getBase()) { + case "varchar": + allBlocks.add(createVarcharBlock(i, groupsPerPage)); + break; + case "bigint": + allBlocks.add(createBigIntBlock(i, groupsPerPage)); + break; + case "integer": + allBlocks.add(createIntegerBlock(i, groupsPerPage)); + break; + default: + return null; + } + } + pagesBuilder.addBlocksPage(allBlocks.toArray(new Block[allBlocks.size()])); + } + return pagesBuilder.build(); + } + + private Block createVarcharBlock(int pageId, int groupsPerPage) + { + String prefix = genVarcharPrefix(); + if (!isDictionary) { + BlockBuilder blockBuilder = VARCHAR.createBlockBuilder(null, ROWS_PER_PAGE, 200); + for (int k = 0; k < groupsPerPage; k++) { + String groupKey = format(prefix + "%s", pageId * groupsPerPage + k); + repeatToStringBlock(groupKey, rowsPerGroup, blockBuilder); + } + return blockBuilder.build(); + } + else { + return createVarcharDictionary(pageId, prefix, groupsPerPage); + } + } + + private Block createBigIntBlock(int pageId, int groupsPerPage) + { + if (!isDictionary) { + BlockBuilder blockBuilder = BIGINT.createBlockBuilder(null, ROWS_PER_PAGE); + for (int k = 0; k < groupsPerPage; k++) { + long groupKey = (long) pageId * groupsPerPage + k; + repeatToLongBlock(groupKey, rowsPerGroup, blockBuilder); + } + return blockBuilder.build(); + } + else { + return createLongDictionary(pageId, groupsPerPage); + } + } + + private Block createIntegerBlock(int pageId, int groupsPerPage) + { + if (!isDictionary) { + BlockBuilder blockBuilder = INTEGER.createBlockBuilder(null, ROWS_PER_PAGE); + for (int k = 0; k < groupsPerPage; k++) { + long groupKey = (long) pageId * groupsPerPage + k; + repeatToIntegerBlock(groupKey, rowsPerGroup, blockBuilder); + } + return blockBuilder.build(); + } + else { + return createIntegerDictionary(pageId, groupsPerPage); + } + } + + @Override + protected OperatorFactory createOperatorFactory() + { + boolean hashAggregation = operatorType.equalsIgnoreCase("hash"); + + if (hashAggregation) { + return createHashAggregationOperatorFactory(); + } + else { + return createStreamingAggregationOperatorFactory(); + } + } + + private OperatorFactory createStreamingAggregationOperatorFactory() + { + return new StreamingAggregationOperatorFactory(0, new PlanNodeId("test"), + ImmutableList.of(VARCHAR), ImmutableList.of(VARCHAR), ImmutableList.of(0), AggregationNode.Step.SINGLE, + ImmutableList.of(COUNT.bind(ImmutableList.of(0), Optional.empty()), + LONG_SUM.bind(ImmutableList.of(1), Optional.empty())), + new JoinCompiler(createTestMetadataManager())); + } + + private OperatorFactory createHashAggregationOperatorFactory() + { + JoinCompiler joinCompiler = new JoinCompiler(createTestMetadataManager()); + SpillerFactory spillerFactory = (types, localSpillContext, aggregatedMemoryContext) -> null; + + return new HashAggregationOperatorFactory(0, new PlanNodeId("test"), + allTypes.get(sqlId), channels.get(sqlId), + 
ImmutableList.of(), AggregationNode.Step.SINGLE, false, + ImmutableList.of(), + Optional.of(allTypes.get(sqlId).size()), Optional.empty(), 100_000, Optional.of(new DataSize(16, MEGABYTE)), false, + succinctBytes(8), succinctBytes(Integer.MAX_VALUE), spillerFactory, joinCompiler, false); + } + + private static void repeatToStringBlock(String value, int count, BlockBuilder blockBuilder) + { + for (int i = 0; i < count; i++) { + VARCHAR.writeString(blockBuilder, value); + } + } + + private static void repeatToLongBlock(long value, int count, BlockBuilder blockBuilder) + { + for (int i = 0; i < count; i++) { + BIGINT.writeLong(blockBuilder, value); + } + } + + private static void repeatToIntegerBlock(long value, int count, BlockBuilder blockBuilder) + { + for (int i = 0; i < count; i++) { + INTEGER.writeLong(blockBuilder, value); + } + } + + private static Block createVarcharDictionary(int pageId, String prefix, int groupCount) + { + BlockBuilder blockBuilder = VARCHAR.createBlockBuilder(null, groupCount); + for (int k = 0; k < groupCount; k++) { + String groupKey = format(prefix + "%s", pageId * groupCount + k); + VARCHAR.writeString(blockBuilder, groupKey); + } + int[] ids = new int[ROWS_PER_PAGE]; + for (int k = 0; k < ROWS_PER_PAGE; k++) { + ids[k] = k % groupCount; + } + return new DictionaryBlock(blockBuilder.build(), ids); + } + + private static Block createLongDictionary(int pageId, int groupCount) + { + BlockBuilder blockBuilder = BIGINT.createBlockBuilder(null, groupCount); + for (int k = 0; k < groupCount; k++) { + long groupKey = pageId * groupCount + k; + BIGINT.writeLong(blockBuilder, groupKey); + } + int[] ids = new int[ROWS_PER_PAGE]; + for (int k = 0; k < ROWS_PER_PAGE; k++) { + ids[k] = k % groupCount; + } + return new DictionaryBlock(blockBuilder.build(), ids); + } + + private static Block createIntegerDictionary(int pageId, int groupCount) + { + BlockBuilder blockBuilder = INTEGER.createBlockBuilder(null, groupCount); + for (int k = 0; k < groupCount; k++) { + int groupKey = pageId * groupCount + k; + INTEGER.writeLong(blockBuilder, groupKey); + } + int[] ids = new int[ROWS_PER_PAGE]; + for (int k = 0; k < ROWS_PER_PAGE; k++) { + ids[k] = k % groupCount; + } + return new DictionaryBlock(blockBuilder.build(), ids); + } + } + + @Benchmark + public List benchmark(Context context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkHashAggregationOlkOperator.class.getSimpleName() + ".*").build(); + + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashAggregationOmniOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashAggregationOmniOperator.java new file mode 100644 index 000000000..4f24bcd87 --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashAggregationOmniOperator.java @@ -0,0 +1,391 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.primitives.Ints; +import io.prestosql.RowPagesBuilder; +import io.prestosql.metadata.Metadata; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.operator.StreamingAggregationOperator.StreamingAggregationOperatorFactory; +import io.prestosql.operator.aggregation.InternalAggregationFunction; +import io.prestosql.spi.Page; +import io.prestosql.spi.block.Block; +import io.prestosql.spi.block.BlockBuilder; +import io.prestosql.spi.block.DictionaryBlock; +import io.prestosql.spi.connector.QualifiedObjectName; +import io.prestosql.spi.function.Signature; +import io.prestosql.spi.plan.AggregationNode; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.Type; +import io.prestosql.spi.type.VarcharType; +import io.prestosql.sql.gen.JoinCompiler; +import nova.hetu.olk.operator.HashAggregationOmniOperator; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOmniOperatorBenchmarkContext; +import nova.hetu.olk.tool.OperatorUtils; +import nova.hetu.omniruntime.constants.FunctionType; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; +import static io.prestosql.spi.function.FunctionKind.AGGREGATE; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.VARCHAR; +import static java.lang.String.format; +import static nova.hetu.omniruntime.constants.FunctionType.OMNI_AGGREGATION_TYPE_AVG; +import static nova.hetu.omniruntime.constants.FunctionType.OMNI_AGGREGATION_TYPE_COUNT_COLUMN; +import static nova.hetu.omniruntime.constants.FunctionType.OMNI_AGGREGATION_TYPE_MAX; +import static nova.hetu.omniruntime.constants.FunctionType.OMNI_AGGREGATION_TYPE_MIN; +import static nova.hetu.omniruntime.constants.FunctionType.OMNI_AGGREGATION_TYPE_SUM; +import static org.openjdk.jmh.annotations.Scope.Thread; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) 
+@Measurement(iterations = 5, time = 1) +public class BenchmarkHashAggregationOmniOperator +{ + private static final Metadata metadata = createTestMetadataManager(); + private static final VarcharType FIXED_WIDTH_VARCHAR = VarcharType.createVarcharType(200); + + private static final InternalAggregationFunction LONG_SUM = metadata.getFunctionAndTypeManager() + .getAggregateFunctionImplementation(new Signature(QualifiedObjectName.valueOfDefaultFunction("sum"), + AGGREGATE, BIGINT.getTypeSignature(), BIGINT.getTypeSignature())); + private static final InternalAggregationFunction COUNT = metadata.getFunctionAndTypeManager() + .getAggregateFunctionImplementation(new Signature(QualifiedObjectName.valueOfDefaultFunction("count"), + AGGREGATE, BIGINT.getTypeSignature())); + + private static final Map> allTypes = new ImmutableMap.Builder>() + .put("sql2", + ImmutableList.of(FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, + INTEGER, INTEGER, BIGINT)) + .put("sql4", ImmutableList.of(FIXED_WIDTH_VARCHAR, INTEGER, INTEGER, INTEGER, BIGINT)) + .put("sql6", ImmutableList.of(INTEGER, INTEGER, BIGINT)) + .put("sql7", + ImmutableList.of(FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, BIGINT, BIGINT, + BIGINT, BIGINT, BIGINT)) + .put("sql9", ImmutableList.of(BIGINT, BIGINT, BIGINT, FIXED_WIDTH_VARCHAR, BIGINT, BIGINT)).build(); + private static final Map> channels = new ImmutableMap.Builder>() + .put("sql2", ImmutableList.of(0, 1, 2, 3, 4, 5, 6)).put("sql4", ImmutableList.of(0, 1, 2, 3, 4)) + .put("sql6", ImmutableList.of(0, 1, 2)).put("sql7", ImmutableList.of(0, 1, 2, 3, 4, 5, 6, 7)) + .put("sql9", ImmutableList.of(0, 1, 2, 3, 4, 5)).build(); + private static final Map> hashChannels = new ImmutableMap.Builder>() + .put("sql2", ImmutableList.of(0, 1, 2, 3, 4, 5)).put("sql4", ImmutableList.of(0, 1, 2, 3)) + .put("sql6", ImmutableList.of(0, 1)).put("sql7", ImmutableList.of(0, 1, 2)) + .put("sql9", ImmutableList.of(0, 1, 2, 3)).build(); + private static final Map> hashTypes = new ImmutableMap.Builder>() + .put("sql2", + ImmutableList.of(FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, + INTEGER, INTEGER)) + .put("sql4", ImmutableList.of(FIXED_WIDTH_VARCHAR, INTEGER, INTEGER, INTEGER)) + .put("sql6", ImmutableList.of(INTEGER, INTEGER)) + .put("sql7", ImmutableList.of(FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR, FIXED_WIDTH_VARCHAR)) + .put("sql9", ImmutableList.of(BIGINT, BIGINT, BIGINT, FIXED_WIDTH_VARCHAR)).build(); + private static final Map> aggChannels = new ImmutableMap.Builder>() + .put("sql2", ImmutableList.of(6)).put("sql4", ImmutableList.of(4)).put("sql6", ImmutableList.of(2)) + .put("sql7", ImmutableList.of(3, 4, 5, 6, 7)).put("sql9", ImmutableList.of(4, 5)).build(); + private static final Map> aggInputTypes = new ImmutableMap.Builder>() + .put("sql2", ImmutableList.of(BIGINT)).put("sql4", ImmutableList.of(BIGINT)) + .put("sql6", ImmutableList.of(BIGINT)).put("sql7", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT, BIGINT)) + .put("sql9", ImmutableList.of(BIGINT, BIGINT)).build(); + private static final Map> aggOutputTypes = new ImmutableMap.Builder>() + .put("sql2", ImmutableList.of(BIGINT)).put("sql4", ImmutableList.of(BIGINT)) + .put("sql6", ImmutableList.of(BIGINT)).put("sql7", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT, BIGINT)) + .put("sql9", ImmutableList.of(BIGINT, BIGINT)).build(); + private static final Map> aggFuncTypes = new ImmutableMap.Builder>() + .put("sql2", ImmutableList.of("sum")).put("sql4", 
ImmutableList.of("sum")) + .put("sql6", ImmutableList.of("sum")).put("sql7", ImmutableList.of("sum", "sum", "sum", "sum", "sum")) + .put("sql9", ImmutableList.of("sum", "sum")).build(); + + public static final int TOTAL_PAGES = 140; + public static final int ROWS_PER_PAGE = 10_000; + public static final String PREFIX_BASE = "A"; + + @State(Thread) + public static class Context + extends AbstractOmniOperatorBenchmarkContext + { + @Param({"100", "1000", "10000"}) + public int rowsPerGroup = 100; + + @Param("hash") + public String operatorType; + + @Param({"false", "true"}) + public boolean isDictionary; + + @Param({"0", "50", "150"}) + public int prefixLength; + + @Param({"sql2", "sql4", "sql6", "sql7", "sql9"}) + public static String sqlId = "sql2"; + + private String genVarcharPrefix() + { + StringBuilder stringBuilder = new StringBuilder(); + for (int i = 0; i < prefixLength; ++i) { + stringBuilder.append(PREFIX_BASE); + } + return stringBuilder.toString(); + } + + @Override + protected List buildPages() + { + List types = allTypes.get(sqlId); + + int groupsPerPage = ROWS_PER_PAGE / rowsPerGroup; + boolean hashAggregation = operatorType.equalsIgnoreCase("hash"); + RowPagesBuilder pagesBuilder = RowPagesBuilder.rowPagesBuilder(hashAggregation, hashChannels.get(sqlId), + hashTypes.get(sqlId)); + + for (int i = 0; i < TOTAL_PAGES; i++) { + List> allBlocks = new ArrayList<>(); + for (Type type : types) { + switch (type.getTypeSignature().getBase()) { + case "varchar": + allBlocks.add(createVarcharBlock(i, groupsPerPage)); + break; + case "bigint": + allBlocks.add(createBigIntBlock(i, groupsPerPage)); + break; + case "integer": + allBlocks.add(createIntegerBlock(i, groupsPerPage)); + break; + default: + return null; + } + } + pagesBuilder.addBlocksPage(allBlocks.toArray(new Block[allBlocks.size()])); + } + return pagesBuilder.build(); + } + + private Block createVarcharBlock(int pageId, int groupsPerPage) + { + String prefix = genVarcharPrefix(); + if (!isDictionary) { + BlockBuilder blockBuilder = VARCHAR.createBlockBuilder(null, ROWS_PER_PAGE, 200); + for (int k = 0; k < groupsPerPage; k++) { + String groupKey = format(prefix + "%s", pageId * groupsPerPage + k); + repeatToStringBlock(groupKey, rowsPerGroup, blockBuilder); + } + return blockBuilder.build(); + } + else { + return createVarcharDictionary(pageId, prefix, groupsPerPage); + } + } + + private Block createBigIntBlock(int pageId, int groupsPerPage) + { + if (!isDictionary) { + BlockBuilder blockBuilder = BIGINT.createBlockBuilder(null, ROWS_PER_PAGE); + for (int k = 0; k < groupsPerPage; k++) { + long groupKey = (long) pageId * groupsPerPage + k; + repeatToLongBlock(groupKey, rowsPerGroup, blockBuilder); + } + return blockBuilder.build(); + } + else { + return createLongDictionary(pageId, groupsPerPage); + } + } + + private Block createIntegerBlock(int pageId, int groupsPerPage) + { + if (!isDictionary) { + BlockBuilder blockBuilder = INTEGER.createBlockBuilder(null, ROWS_PER_PAGE); + for (int k = 0; k < groupsPerPage; k++) { + long groupKey = (long) pageId * groupsPerPage + k; + repeatToIntegerBlock(groupKey, rowsPerGroup, blockBuilder); + } + return blockBuilder.build(); + } + else { + return createIntegerDictionary(pageId, groupsPerPage); + } + } + + private FunctionType[] transferAggType(List aggregators) + { + FunctionType[] res = new FunctionType[aggregators.size()]; + for (int i = 0; i < aggregators.size(); i++) { + // aggregator type, eg:sum,avg... 
+ String agg = aggregators.get(i); + switch (agg) { + case "sum": + res[i] = OMNI_AGGREGATION_TYPE_SUM; + break; + case "avg": + res[i] = OMNI_AGGREGATION_TYPE_AVG; + break; + case "count": + res[i] = OMNI_AGGREGATION_TYPE_COUNT_COLUMN; + break; + case "min": + res[i] = OMNI_AGGREGATION_TYPE_MIN; + break; + case "max": + res[i] = OMNI_AGGREGATION_TYPE_MAX; + break; + default: + throw new UnsupportedOperationException("unsupported Aggregator type by OmniRuntime: " + agg); + } + } + return res; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + boolean hashAggregation = operatorType.equalsIgnoreCase("hash"); + + if (hashAggregation) { + return createHashAggregationOperatorFactory(); + } + else { + return createStreamingAggregationOperatorFactory(); + } + } + + private OperatorFactory createStreamingAggregationOperatorFactory() + { + return new StreamingAggregationOperatorFactory(0, new PlanNodeId("test"), + ImmutableList.of(VARCHAR), ImmutableList.of(VARCHAR), ImmutableList.of(0), + AggregationNode.Step.SINGLE, + ImmutableList.of(COUNT.bind(ImmutableList.of(0), Optional.empty()), + LONG_SUM.bind(ImmutableList.of(1), Optional.empty())), + new JoinCompiler(createTestMetadataManager())); + } + + private OperatorFactory createHashAggregationOperatorFactory() + { + ImmutableList.Builder> maskChannels = ImmutableList.builder(); + for (int i = 0; i < aggFuncTypes.get(sqlId).size(); i++) { // one mask channel for each agg func + maskChannels.add(Optional.empty()); + } + return new HashAggregationOmniOperator.HashAggregationOmniOperatorFactory(0, new PlanNodeId("test"), + allTypes.get(sqlId), Ints.toArray(hashChannels.get(sqlId)), + OperatorUtils.toDataTypes(hashTypes.get(sqlId)), Ints.toArray(aggChannels.get(sqlId)), + OperatorUtils.toDataTypes(aggInputTypes.get(sqlId)), transferAggType(aggFuncTypes.get(sqlId)), + maskChannels.build(), OperatorUtils.toDataTypes(aggOutputTypes.get(sqlId)), + AggregationNode.Step.SINGLE); + } + + private static void repeatToStringBlock(String value, int count, BlockBuilder blockBuilder) + { + for (int i = 0; i < count; i++) { + VARCHAR.writeString(blockBuilder, value); + } + } + + private static void repeatToLongBlock(long value, int count, BlockBuilder blockBuilder) + { + for (int i = 0; i < count; i++) { + BIGINT.writeLong(blockBuilder, value); + } + } + + private static void repeatToIntegerBlock(long value, int count, BlockBuilder blockBuilder) + { + for (int i = 0; i < count; i++) { + INTEGER.writeLong(blockBuilder, value); + } + } + + private static Block createVarcharDictionary(int pageId, String prefix, int groupCount) + { + BlockBuilder blockBuilder = VARCHAR.createBlockBuilder(null, groupCount); + for (int k = 0; k < groupCount; k++) { + String groupKey = format(prefix + "%s", pageId * groupCount + k); + VARCHAR.writeString(blockBuilder, groupKey); + } + int[] ids = new int[ROWS_PER_PAGE]; + for (int k = 0; k < ROWS_PER_PAGE; k++) { + ids[k] = k % groupCount; + } + return new DictionaryBlock(blockBuilder.build(), ids); + } + + private static Block createLongDictionary(int pageId, int groupCount) + { + BlockBuilder blockBuilder = BIGINT.createBlockBuilder(null, groupCount); + for (int k = 0; k < groupCount; k++) { + long groupKey = pageId * groupCount + k; + BIGINT.writeLong(blockBuilder, groupKey); + } + int[] ids = new int[ROWS_PER_PAGE]; + for (int k = 0; k < ROWS_PER_PAGE; k++) { + ids[k] = k % groupCount; + } + return new DictionaryBlock(blockBuilder.build(), ids); + } + + private static Block createIntegerDictionary(int pageId, 
int groupCount) + { + BlockBuilder blockBuilder = INTEGER.createBlockBuilder(null, groupCount); + for (int k = 0; k < groupCount; k++) { + long groupKey = pageId * groupCount + k; + INTEGER.writeLong(blockBuilder, groupKey); + } + int[] ids = new int[ROWS_PER_PAGE]; + for (int k = 0; k < ROWS_PER_PAGE; k++) { + ids[k] = k % groupCount; + } + return new DictionaryBlock(blockBuilder.build(), ids); + } + } + + @Benchmark + public List benchmark(Context context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkHashAggregationOmniOperator.class.getSimpleName() + ".*").build(); + + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashJoinOlkOperators.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashJoinOlkOperators.java new file mode 100644 index 000000000..705621479 --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashJoinOlkOperators.java @@ -0,0 +1,590 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.util.concurrent.ListenableFuture; +import io.prestosql.execution.Lifespan; +import io.prestosql.operator.DriverContext; +import io.prestosql.operator.HashBuilderOperator.HashBuilderOperatorFactory; +import io.prestosql.operator.JoinBridgeManager; +import io.prestosql.operator.LookupJoinOperators; +import io.prestosql.operator.LookupSourceFactory; +import io.prestosql.operator.LookupSourceProvider; +import io.prestosql.operator.Operator; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.operator.PagesIndex; +import io.prestosql.operator.PartitionedLookupSourceFactory; +import io.prestosql.operator.TaskContext; +import io.prestosql.spi.Page; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.BigintType; +import io.prestosql.spi.type.DecimalType; +import io.prestosql.spi.type.DoubleType; +import io.prestosql.spi.type.IntegerType; +import io.prestosql.spi.type.Type; +import io.prestosql.spi.type.VarcharType; +import io.prestosql.spiller.SingleStreamSpillerFactory; +import io.prestosql.type.TypeUtils; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.OptionalInt; +import java.util.Random; + +import static com.google.common.collect.ImmutableList.toImmutableList; +import static io.airlift.concurrent.MoreFutures.getFutureValue; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.DecimalType.createDecimalType; +import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; +import static io.prestosql.spiller.PartitioningSpillerFactory.unsupportedPartitioningSpillerFactory; +import static java.util.Objects.requireNonNull; +import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static org.openjdk.jmh.annotations.Mode.AverageTime; +import static org.openjdk.jmh.annotations.Scope.Thread; + +@State(Thread) +@OutputTimeUnit(MILLISECONDS) +@BenchmarkMode(AverageTime) +@Fork(0) +@Threads(1) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkHashJoinOlkOperators +{ + private static final int HASH_BUILD_OPERATOR_ID = 1; + private static final int HASH_JOIN_OPERATOR_ID = 2; + private static final PlanNodeId TEST_PLAN_NODE_ID = new PlanNodeId("test"); + private static final LookupJoinOperators LOOKUP_JOIN_OPERATORS = new LookupJoinOperators(); + + @State(Thread) + public static class BuildContext + extends AbstractOperatorBenchmarkContext + { + protected static final int ROWS_PER_PAGE = 10240; + 
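+ // total rows generated for the build side; initializeBuildPages() emits pages of ROWS_PER_PAGE rows until this count is reached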
protected static final int BUILD_ROWS_NUMBER = 8_000_000; + protected static final String PREFIX = ""; + + protected static final Map> BUILD_TYPES = ImmutableMap + .>builder().put("group1", ImmutableList.of(BIGINT, createVarcharType(20))) + .put("group2", + ImmutableList.of(BIGINT, INTEGER, createVarcharType(50), INTEGER, INTEGER, + createVarcharType(50), createVarcharType(10))) + .put("group3", ImmutableList.of(BIGINT, createVarcharType(10))) + .put("group4", ImmutableList.of(BIGINT, createVarcharType(50), createVarcharType(50))) + .put("group5", + ImmutableList.of(BIGINT, INTEGER, createVarcharType(50), INTEGER, INTEGER, + createVarcharType(50), createVarcharType(10))) + .put("group6", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT, INTEGER, INTEGER)) + .put("group7", ImmutableList.of(BIGINT)).put("group8", ImmutableList.of(BIGINT, INTEGER, INTEGER)) + .put("group9", ImmutableList.of(BIGINT)).put("group10", ImmutableList.of(BIGINT)) + .put("group11", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT, BIGINT, INTEGER, INTEGER)) + .put("group12", + ImmutableList.of(createVarcharType(50), createVarcharType(50), BIGINT, createVarcharType(50), + createVarcharType(50), BIGINT)) + .put("group13", ImmutableList.of(createVarcharType(50), INTEGER, BIGINT)) + .put("group14", ImmutableList.of(createDecimalType(12, 2), BIGINT, createVarcharType(50), INTEGER)) + .build(); + + protected static final Map> BUILD_OUTPUT_COLS = ImmutableMap + .>builder().put("group1", ImmutableList.of(1)) + .put("group2", ImmutableList.of(1, 2, 3, 4, 5)).put("group3", ImmutableList.of(1)) + .put("group4", ImmutableList.of(1, 2)).put("group5", ImmutableList.of(1, 2, 3, 4, 5, 6)) + .put("group6", ImmutableList.of(0, 1, 3, 4, 5)).put("group7", ImmutableList.of()) + .put("group8", ImmutableList.of(1, 2)).put("group9", ImmutableList.of()) + .put("group10", ImmutableList.of()).put("group11", ImmutableList.of(0, 2, 3, 4, 5, 6)) + .put("group12", ImmutableList.of(2)).put("group13", ImmutableList.of(0, 1)) + .put("group14", ImmutableList.of(0, 2, 3)) + .build(); + + protected static final Map> BUILD_HASH_COLS = ImmutableMap + .>builder().put("group1", ImmutableList.of(0)) + .put("group2", ImmutableList.of(0)).put("group3", ImmutableList.of(0)) + .put("group4", ImmutableList.of(0)).put("group5", ImmutableList.of(0)) + .put("group6", ImmutableList.of(2)).put("group7", ImmutableList.of(0)) + .put("group8", ImmutableList.of(0)).put("group9", ImmutableList.of(0)) + .put("group10", ImmutableList.of(0)).put("group11", ImmutableList.of(1)) + .put("group12", ImmutableList.of(3, 0, 4, 1, 5)).put("group13", ImmutableList.of(2)) + .put("group14", ImmutableList.of(1)).build(); + + @Param({"group1", "group2", "group3", "group4", "group5", "group6", "group7", "group8", "group9", "group10", + "group11", "group12", "group13", "group14"}) + protected String testGroup; + + @Param({"false", "true"}) + protected boolean isDictionaryBlocks; + + @Param({"false", "true"}) + protected boolean buildHashEnabled; + + @Param({"1", "5"}) + protected int buildRowsRepetition = 1; + + protected List buildPages = new ArrayList<>(); + protected List buildTypes; + protected List buildOutputChannels; + protected List buildJoinChannels; + protected OptionalInt buildHashChannel; + + protected JoinBridgeManager lookupSourceFactoryManager; + + @Override + protected void beforeSetupTrial() + { + buildTypes = BUILD_TYPES.get(testGroup); + buildOutputChannels = BUILD_OUTPUT_COLS.get(testGroup); + buildJoinChannels = BUILD_HASH_COLS.get(testGroup); + buildHashChannel = 
OptionalInt.empty(); + initializeBuildPages(); + } + + @Override + protected void beforeSetupIteration() + { + operatorFactory = createOperatorFactory(); + } + + @Override + protected List buildPages() + { + return buildPages; + } + + @Override + protected List forkPages(List pages) + { + return forkBuildPage(pages); + } + + protected List forkBuildPage(List pages) + { + List slicedPages = new ArrayList<>(pages.size()); + for (Page page : pages) { + slicedPages.add(page.getRegion(0, page.getPositionCount())); + } + return slicedPages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + HashBuilderOperatorFactory hashBuilderOperatorFactory = createBuildOperatorFactory(); + LOOKUP_JOIN_OPERATORS.innerJoin(HASH_JOIN_OPERATOR_ID, TEST_PLAN_NODE_ID, lookupSourceFactoryManager, + getBuildTypes(), buildJoinChannels, buildHashChannel, + Optional.of(buildOutputChannels), OptionalInt.empty(), + unsupportedPartitioningSpillerFactory()); + return hashBuilderOperatorFactory; + } + + @Override + protected void beforeCleanupIteration() + { + finishBuildHash(lookupSourceFactoryManager); + } + + protected HashBuilderOperatorFactory createBuildOperatorFactory() + { + JoinBridgeManager lookupSourceFactoryJoinBridgeManager = JoinBridgeManager + .lookupAllAtOnce(new PartitionedLookupSourceFactory(getBuildTypes(), + buildOutputChannels.stream().map(getBuildTypes()::get) + .collect(toImmutableList()), + buildJoinChannels.stream().map(getBuildTypes()::get) + .collect(toImmutableList()), + 1, requireNonNull(ImmutableMap.of(), "layout is null"), false, false)); + this.lookupSourceFactoryManager = lookupSourceFactoryJoinBridgeManager; + return new HashBuilderOperatorFactory(HASH_BUILD_OPERATOR_ID, + TEST_PLAN_NODE_ID, lookupSourceFactoryJoinBridgeManager, buildOutputChannels, + buildJoinChannels, buildHashChannel, Optional.empty(), Optional.empty(), + ImmutableList.of(), 10_000, new PagesIndex.TestingFactory(false), false, + SingleStreamSpillerFactory.unsupportedSingleStreamSpillerFactory()); + } + + public void buildHash(Operator operator, List inputPages) + { + Iterator iterator = inputPages.iterator(); + while (iterator.hasNext()) { + Page next = iterator.next(); + operator.addInput(next); + iterator.remove(); + } + operator.finish(); + } + + public void finishBuildHash(JoinBridgeManager lookupSourceFactoryManager) + { + LookupSourceFactory lookupSourceFactory = lookupSourceFactoryManager.getJoinBridge(Lifespan.taskWide()); + ListenableFuture lookupSourceProvider = lookupSourceFactory.createLookupSourceProvider(); + if (!lookupSourceProvider.isDone()) { + throw new AssertionError("Expected lookup source provider to be ready"); + } + getFutureValue(lookupSourceProvider).close(); + } + + @Override + protected TaskContext createTaskContext() + { + return createTaskContextBySizeInGigaByte(4); + } + + protected List getBuildTypes() + { + if (buildHashEnabled) { + return ImmutableList.copyOf(Iterables.concat(buildTypes, ImmutableList.of(BIGINT))); + } + return buildTypes; + } + + protected void initializeBuildPages() + { + List> columnValues = new ArrayList<>(); + for (int i = 0; i < buildTypes.size(); i++) { + List values = new ArrayList<>(); + columnValues.add(values); + } + + int maxValue = BUILD_ROWS_NUMBER / buildRowsRepetition + 40; + int rows = 0; + while (rows < BUILD_ROWS_NUMBER) { + int newRows = Math.min(BUILD_ROWS_NUMBER - rows, ROWS_PER_PAGE); + for (int i = 0; i < buildTypes.size(); i++) { + Type type = buildTypes.get(i); + List values = columnValues.get(i); + int initialValue; + 
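+ // pick a type-specific starting key; the modulo by maxValue lets each build key repeat roughly buildRowsRepetition times overall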
if (type instanceof VarcharType) { + initialValue = (rows + 20) % maxValue; + for (int j = 0; j < newRows; j++) { + values.add(initialValue + j); + } + } + else if (type instanceof BigintType) { + initialValue = (rows + 30) % maxValue; + for (int j = 0; j < newRows; j++) { + values.add(initialValue + j); + } + } + else if (type instanceof IntegerType) { + initialValue = (rows + 40) % maxValue; + for (int j = 0; j < newRows; j++) { + values.add(initialValue + j); + } + } + else if (type instanceof DoubleType) { + initialValue = (rows + 50) % maxValue; + for (int j = 0; j < newRows; j++) { + values.add(initialValue + j); + } + } + else if (type instanceof DecimalType) { + initialValue = (rows + 60) % maxValue; + for (int j = 0; j < newRows; j++) { + values.add(initialValue + j); + } + } + } + Page page; + if (isDictionaryBlocks) { + page = PageBuilderUtil.createPageWithDictionaryBlocks(buildTypes, PREFIX, columnValues); + } + else { + page = PageBuilderUtil.createPage(buildTypes, PREFIX, columnValues); + } + buildPages.add(page); + rows += newRows; + + for (int i = 0; i < buildTypes.size(); i++) { + columnValues.get(i).clear(); + } + } + if (buildHashEnabled) { + generateHashPage(buildPages, buildTypes, buildJoinChannels); + buildHashChannel = OptionalInt.of(buildTypes.size()); + } + } + + protected void generateHashPage(List pages, List inputTypes, List hashChannels) + { + for (int i = 0; i < pages.size(); i++) { + Page page = pages.get(i); + pages.set(i, TypeUtils.getHashPage(page, inputTypes, hashChannels)); + } + } + } + + public static class JoinContext + extends BuildContext + { + protected static final int PROBE_ROWS_NUMBER = 1_400_000; + + protected static final Map> PROBE_TYPES = ImmutableMap + .>builder() + .put("group1", ImmutableList.of(BIGINT, BIGINT, BIGINT, createVarcharType(30), createVarcharType(50))) + .put("group2", ImmutableList.of(BIGINT, createVarcharType(10))) + .put("group3", + ImmutableList.of(BIGINT, BIGINT, INTEGER, createVarcharType(50), INTEGER, INTEGER, + createVarcharType(50))) + .put("group4", ImmutableList.of(BIGINT, BIGINT, INTEGER, INTEGER, INTEGER)) + .put("group5", ImmutableList.of(BIGINT, INTEGER)) + .put("group6", ImmutableList.of(createVarcharType(60), BIGINT)) + .put("group7", ImmutableList.of(BIGINT, BIGINT, BIGINT, INTEGER, createVarcharType(50), INTEGER, INTEGER, + createVarcharType(50))) + .put("group8", ImmutableList.of(BIGINT, BIGINT, BIGINT, INTEGER)) + .put("group9", ImmutableList.of(BIGINT, BIGINT)) + .put("group10", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT, BIGINT, BIGINT, INTEGER, INTEGER)) + .put("group11", ImmutableList.of(BIGINT)) + .put("group12", + ImmutableList.of(createVarcharType(50), DOUBLE, createVarcharType(50), BIGINT, + createVarcharType(50), BIGINT, createVarcharType(50), INTEGER, BIGINT)) + .put("group13", ImmutableList.of(createDecimalType(12, 2), BIGINT, BIGINT)) + .put("group14", ImmutableList.of(INTEGER, INTEGER, BIGINT)).build(); + + protected static final Map> PROBE_OUTPUT_COLS = ImmutableMap + .>builder().put("group1", ImmutableList.of(0, 2, 3, 4)) + .put("group2", ImmutableList.of()).put("group3", ImmutableList.of(0, 2, 3, 4, 5, 6)) + .put("group4", ImmutableList.of(0, 2, 3, 4)).put("group5", ImmutableList.of(1)) + .put("group6", ImmutableList.of(0)).put("group7", ImmutableList.of(0, 1, 3, 4, 5, 6, 7)) + .put("group8", ImmutableList.of(0, 2, 3)).put("group9", ImmutableList.of(0)) + .put("group10", ImmutableList.of(0, 1, 2, 3, 4, 6, 7)).put("group11", ImmutableList.of()) + .put("group12", ImmutableList.of(8, 2, 
1, 4, 3, 7)).put("group13", ImmutableList.of(0, 2)) + .put("group14", ImmutableList.of(0, 1)).build(); + + protected static final Map> PROBE_HASH_COLS = ImmutableMap + .>builder().put("group1", ImmutableList.of(1)) + .put("group2", ImmutableList.of(0)).put("group3", ImmutableList.of(1)) + .put("group4", ImmutableList.of(1)).put("group5", ImmutableList.of(0)) + .put("group6", ImmutableList.of(1)).put("group7", ImmutableList.of(2)) + .put("group8", ImmutableList.of(1)).put("group9", ImmutableList.of(1)) + .put("group10", ImmutableList.of(5)).put("group11", ImmutableList.of(0)) + .put("group12", ImmutableList.of(2, 4, 6, 0, 5)).put("group13", ImmutableList.of(1)) + .put("group14", ImmutableList.of(2)).build(); + + @Param({"0.1", "1", "2"}) + protected double matchRate = 1; + + protected List probePages = new ArrayList<>(); + protected List probeTypes; + protected List probeOutputChannels; + protected List probeJoinChannels; + protected OptionalInt probeHashChannel; + + private DriverContext buildDriverContext; + private Operator buildOperator; + + @Override + protected void beforeSetupTrial() + { + super.beforeSetupTrial(); + probeTypes = PROBE_TYPES.get(testGroup); + probeOutputChannels = PROBE_OUTPUT_COLS.get(testGroup); + probeJoinChannels = PROBE_HASH_COLS.get(testGroup); + probeHashChannel = OptionalInt.empty(); + if (buildHashEnabled) { + probeHashChannel = OptionalInt.of(probeTypes.size()); + } + initializeProbePages(); + } + + @Override + protected void beforeSetupIteration() + { + } + + @Override + protected void beforeCleanupTrial() + { + try { + buildOperator.close(); + } + catch (Exception e) { + e.printStackTrace(); + } + buildDriverContext.finished(); + buildDriverContext.getPipelineContext().getTaskContext().getTaskStateMachine().finished(); + } + + @Override + protected List buildPages() + { + return probePages; + } + + @Override + protected List forkPages(List pages) + { + List slicedPages = new ArrayList<>(pages.size()); + for (Page page : pages) { + slicedPages.add(page.getRegion(0, page.getPositionCount())); + } + return slicedPages; + } + + public List getProbeTypes() + { + if (buildHashEnabled) { + return ImmutableList.copyOf(Iterables.concat(probeTypes, ImmutableList.of(BIGINT))); + } + return probeTypes; + } + + protected void initializeProbePages() + { + List> columnValues = new ArrayList<>(); + for (int i = 0; i < probeTypes.size(); i++) { + List values = new ArrayList<>(); + columnValues.add(values); + } + + List initials = new ArrayList<>(); + Random random = new Random(42); + int remainingRows = PROBE_ROWS_NUMBER; + int rowsInPage = 0; + while (remainingRows > 0) { + double roll = random.nextDouble(); + + for (int i = 0; i < probeTypes.size(); i++) { + Type type = probeTypes.get(i); + if (type instanceof VarcharType) { + initials.add(20 + remainingRows); + } + else if (type instanceof BigintType) { + initials.add(30 + remainingRows); + } + else if (type instanceof IntegerType) { + initials.add(40 + remainingRows); + } + else if (type instanceof DoubleType) { + initials.add(50 + remainingRows); + } + else if (type instanceof DecimalType) { + initials.add(60 + remainingRows); + } + } + + int rowsCount = 1; + if (matchRate < 1) { + // each row has matchRate chance to join + if (roll > matchRate) { + // generate not matched row + for (int i = 0; i < initials.size(); i++) { + initials.set(i, initials.get(i) * -1); + } + } + } + else if (matchRate > 1) { + // each row has will be repeated between one and 2*matchRate times + roll = roll * 2 * matchRate + 1; + // example 
for matchRate == 2: + // roll is within [0, 5) range + // rowsCount is within [0, 4] range, where each value has same probability + // so expected rowsCount is 2 + rowsCount = (int) Math.floor(roll); + } + + for (int i = 0; i < rowsCount; i++) { + if (rowsInPage >= ROWS_PER_PAGE) { + Page page; + if (isDictionaryBlocks) { + // create dictionary page + page = PageBuilderUtil.createPageWithDictionaryBlocks(probeTypes, PREFIX, columnValues); + } + else { + page = PageBuilderUtil.createPage(probeTypes, PREFIX, columnValues); + } + probePages.add(page); + rowsInPage = 0; + + for (int j = 0; j < probeTypes.size(); j++) { + columnValues.get(j).clear(); + } + } + + for (int j = 0; j < probeTypes.size(); j++) { + columnValues.get(j).add(initials.get(j)); + } + --remainingRows; + rowsInPage++; + } + initials.clear(); + } + if (buildHashEnabled) { + generateHashPage(probePages, probeTypes, probeJoinChannels); + } + } + + @Override + protected TaskContext createTaskContext() + { + return createTaskContextBySizeInGigaByte(4); + } + + @Override + protected OperatorFactory createOperatorFactory() + { + HashBuilderOperatorFactory hashBuilderOperatorFactory = createBuildOperatorFactory(); + + OperatorFactory operatorFactory = LOOKUP_JOIN_OPERATORS.innerJoin(HASH_JOIN_OPERATOR_ID, TEST_PLAN_NODE_ID, lookupSourceFactoryManager, + getProbeTypes(), probeJoinChannels, probeHashChannel, + Optional.of(probeOutputChannels), OptionalInt.empty(), + unsupportedPartitioningSpillerFactory()); + buildDriverContext = super.createTaskContext().addPipelineContext(0, true, true, false) + .addDriverContext(); + buildOperator = hashBuilderOperatorFactory.createOperator(buildDriverContext); + buildHash(buildOperator, forkBuildPage(buildPages)); + finishBuildHash(lookupSourceFactoryManager); + return operatorFactory; + } + } + + @Benchmark + public JoinBridgeManager benchmarkBuildHash(BuildContext buildContext) + { + buildContext.buildHash(buildContext.createOperator(), buildContext.getRemainInputPages()); + return buildContext.lookupSourceFactoryManager; + } + + @Benchmark + public List benchmarkJoinHash(JoinContext joinContext) + { + return joinContext.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkHashJoinOlkOperators.class.getSimpleName() + ".*").build(); + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashJoinOmniOperators.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashJoinOmniOperators.java new file mode 100644 index 000000000..3b567945c --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkHashJoinOmniOperators.java @@ -0,0 +1,592 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
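The probe-side row expansion used by initializeProbePages can be checked in isolation. Below is a minimal standalone sketch of the same arithmetic; the class name and sample count are illustrative only, not part of the patch:

    import java.util.Random;

    public final class MatchRateExpansionSketch
    {
        public static void main(String[] args)
        {
            double matchRate = 2;               // mirrors the "2" @Param value
            Random random = new Random(42);     // same fixed seed as the benchmark
            int samples = 1_000_000;
            long totalRows = 0;
            for (int i = 0; i < samples; i++) {
                double roll = random.nextDouble();           // uniform in [0, 1)
                double scaled = roll * 2 * matchRate + 1;    // uniform in [1, 2 * matchRate + 1)
                totalRows += (int) Math.floor(scaled);       // 1 .. 2 * matchRate, equally likely
            }
            // for matchRate == 2 each probe row is emitted between 1 and 4 times, about 2.5 on average
            System.out.println((double) totalRows / samples);
        }
    }
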
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Iterables; +import com.google.common.util.concurrent.ListenableFuture; +import io.prestosql.execution.Lifespan; +import io.prestosql.operator.DriverContext; +import io.prestosql.operator.JoinBridgeManager; +import io.prestosql.operator.LookupSourceFactory; +import io.prestosql.operator.LookupSourceProvider; +import io.prestosql.operator.Operator; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.operator.PartitionedLookupSourceFactory; +import io.prestosql.operator.TaskContext; +import io.prestosql.spi.Page; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.BigintType; +import io.prestosql.spi.type.DecimalType; +import io.prestosql.spi.type.DoubleType; +import io.prestosql.spi.type.IntegerType; +import io.prestosql.spi.type.Type; +import io.prestosql.spi.type.VarcharType; +import io.prestosql.type.TypeUtils; +import nova.hetu.olk.operator.HashBuilderOmniOperator.HashBuilderOmniOperatorFactory; +import nova.hetu.olk.operator.LookupJoinOmniOperators; +import nova.hetu.olk.tool.VecAllocatorHelper; +import nova.hetu.omniruntime.vector.VecAllocator; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.OptionalInt; +import java.util.Random; + +import static com.google.common.collect.ImmutableList.toImmutableList; +import static io.airlift.concurrent.MoreFutures.getFutureValue; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.DecimalType.createDecimalType; +import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; +import static java.util.Objects.requireNonNull; +import static java.util.concurrent.TimeUnit.MILLISECONDS; +import static nova.hetu.olk.tool.OperatorUtils.transferToOffHeapPages; +import static org.openjdk.jmh.annotations.Mode.AverageTime; +import static org.openjdk.jmh.annotations.Scope.Thread; + +@State(Thread) +@OutputTimeUnit(MILLISECONDS) +@BenchmarkMode(AverageTime) +@Fork(0) +@Threads(1) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkHashJoinOmniOperators +{ + private static final int HASH_BUILD_OPERATOR_ID = 1; + private static final int HASH_JOIN_OPERATOR_ID = 2; + private static final PlanNodeId TEST_PLAN_NODE_ID = new PlanNodeId("test"); + + @State(Thread) + public static class BuildContext + extends AbstractOperatorBenchmarkContext + { + protected static final int ROWS_PER_PAGE = 10240; + 
protected static final int BUILD_ROWS_NUMBER = 8_000_000; + protected static final String PREFIX = ""; + + protected static final Map> BUILD_TYPES = ImmutableMap + .>builder().put("group1", ImmutableList.of(BIGINT, createVarcharType(20))) + .put("group2", + ImmutableList.of(BIGINT, INTEGER, createVarcharType(50), INTEGER, INTEGER, + createVarcharType(50), createVarcharType(10))) + .put("group3", ImmutableList.of(BIGINT, createVarcharType(10))) + .put("group4", ImmutableList.of(BIGINT, createVarcharType(50), createVarcharType(50))) + .put("group5", + ImmutableList.of(BIGINT, INTEGER, createVarcharType(50), INTEGER, INTEGER, + createVarcharType(50), createVarcharType(10))) + .put("group6", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT, INTEGER, INTEGER)) + .put("group7", ImmutableList.of(BIGINT)).put("group8", ImmutableList.of(BIGINT, INTEGER, INTEGER)) + .put("group9", ImmutableList.of(BIGINT)).put("group10", ImmutableList.of(BIGINT)) + .put("group11", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT, BIGINT, INTEGER, INTEGER)) + .put("group12", + ImmutableList.of(createVarcharType(50), createVarcharType(50), BIGINT, createVarcharType(50), + createVarcharType(50), BIGINT)) + .put("group13", ImmutableList.of(createVarcharType(50), INTEGER, BIGINT)) + .put("group14", ImmutableList.of(createDecimalType(12, 2), BIGINT, createVarcharType(50), INTEGER)) + .build(); + + protected static final Map> BUILD_OUTPUT_COLS = ImmutableMap + .>builder().put("group1", ImmutableList.of(1)) + .put("group2", ImmutableList.of(1, 2, 3, 4, 5)).put("group3", ImmutableList.of(1)) + .put("group4", ImmutableList.of(1, 2)).put("group5", ImmutableList.of(1, 2, 3, 4, 5, 6)) + .put("group6", ImmutableList.of(0, 1, 3, 4, 5)).put("group7", ImmutableList.of()) + .put("group8", ImmutableList.of(1, 2)).put("group9", ImmutableList.of()) + .put("group10", ImmutableList.of()).put("group11", ImmutableList.of(0, 2, 3, 4, 5, 6)) + .put("group12", ImmutableList.of(2)).put("group13", ImmutableList.of(0, 1)) + .put("group14", ImmutableList.of(0, 2, 3)) + .build(); + + protected static final Map> BUILD_HASH_COLS = ImmutableMap + .>builder().put("group1", ImmutableList.of(0)) + .put("group2", ImmutableList.of(0)).put("group3", ImmutableList.of(0)) + .put("group4", ImmutableList.of(0)).put("group5", ImmutableList.of(0)) + .put("group6", ImmutableList.of(2)).put("group7", ImmutableList.of(0)) + .put("group8", ImmutableList.of(0)).put("group9", ImmutableList.of(0)) + .put("group10", ImmutableList.of(0)).put("group11", ImmutableList.of(1)) + .put("group12", ImmutableList.of(3, 0, 4, 1, 5)).put("group13", ImmutableList.of(2)) + .put("group14", ImmutableList.of(1)).build(); + + @Param({"group1", "group2", "group3", "group4", "group5", "group6", "group7", "group8", "group9", "group10", + "group11", "group12", "group13", "group14"}) + protected String testGroup; + + @Param({"false", "true"}) + protected boolean isDictionaryBlocks; + + @Param({"false", "true"}) + protected boolean buildHashEnabled; + + @Param({"1", "5"}) + protected int buildRowsRepetition = 1; + + protected List buildPages = new ArrayList<>(); + protected List buildTypes; + protected List buildOutputChannels; + protected List buildJoinChannels; + protected OptionalInt buildHashChannel; + + private VecAllocator buildVecAllocator; + protected JoinBridgeManager lookupSourceFactoryManager; + + @Override + protected void beforeSetupTrial() + { + buildTypes = BUILD_TYPES.get(testGroup); + buildOutputChannels = BUILD_OUTPUT_COLS.get(testGroup); + buildJoinChannels = 
BUILD_HASH_COLS.get(testGroup); + buildHashChannel = OptionalInt.empty(); + initializeBuildPages(); + } + + @Override + protected void beforeSetupIteration() + { + operatorFactory = createOperatorFactory(); + } + + @Override + protected List buildPages() + { + return buildPages; + } + + @Override + protected List forkPages(List pages) + { + return forkBuildPage(pages); + } + + protected List forkBuildPage(List pages) + { + List slicedPages = new ArrayList<>(pages.size()); + for (Page page : pages) { + slicedPages.add(page.getRegion(0, page.getPositionCount())); + } + return transferToOffHeapPages(buildVecAllocator, slicedPages); + } + + @Override + protected OperatorFactory createOperatorFactory() + { + HashBuilderOmniOperatorFactory hashBuilderOperatorFactory = createBuildOperatorFactory(); + LookupJoinOmniOperators.innerJoin(HASH_JOIN_OPERATOR_ID, TEST_PLAN_NODE_ID, lookupSourceFactoryManager, + getBuildTypes(), buildJoinChannels, buildHashChannel, + Optional.of(buildOutputChannels), OptionalInt.of(1), hashBuilderOperatorFactory); + return hashBuilderOperatorFactory; + } + + @Override + protected void beforeCleanupIteration() + { + finishBuildHash(lookupSourceFactoryManager); + } + + protected HashBuilderOmniOperatorFactory createBuildOperatorFactory() + { + JoinBridgeManager lookupSourceFactoryJoinBridgeManager = JoinBridgeManager + .lookupAllAtOnce(new PartitionedLookupSourceFactory(getBuildTypes(), + buildOutputChannels.stream().map(getBuildTypes()::get) + .collect(toImmutableList()), + buildJoinChannels.stream().map(getBuildTypes()::get) + .collect(toImmutableList()), + 1, requireNonNull(ImmutableMap.of(), "layout is null"), false, false)); + this.lookupSourceFactoryManager = lookupSourceFactoryJoinBridgeManager; + return new HashBuilderOmniOperatorFactory(HASH_BUILD_OPERATOR_ID, + TEST_PLAN_NODE_ID, lookupSourceFactoryJoinBridgeManager, getBuildTypes(), + buildOutputChannels, buildJoinChannels, buildHashChannel, Optional.empty(), Optional.empty(), + ImmutableList.of(), 1); + } + + public void buildHash(Operator operator, List inputPages) + { + Iterator iterator = inputPages.iterator(); + while (iterator.hasNext()) { + Page next = iterator.next(); + operator.addInput(next); + iterator.remove(); + } + operator.finish(); + } + + public void finishBuildHash(JoinBridgeManager lookupSourceFactoryManager) + { + LookupSourceFactory lookupSourceFactory = lookupSourceFactoryManager.getJoinBridge(Lifespan.taskWide()); + ListenableFuture lookupSourceProvider = lookupSourceFactory.createLookupSourceProvider(); + if (!lookupSourceProvider.isDone()) { + throw new AssertionError("Expected lookup source provider to be ready"); + } + getFutureValue(lookupSourceProvider).close(); + } + + @Override + protected TaskContext createTaskContext() + { + TaskContext testingTaskContext = createTaskContextBySizeInGigaByte(4); + buildVecAllocator = VecAllocatorHelper.createTaskLevelAllocator(testingTaskContext); + return testingTaskContext; + } + + protected List getBuildTypes() + { + if (buildHashEnabled) { + return ImmutableList.copyOf(Iterables.concat(buildTypes, ImmutableList.of(BIGINT))); + } + return buildTypes; + } + + protected void initializeBuildPages() + { + List> columnValues = new ArrayList<>(); + for (int i = 0; i < buildTypes.size(); i++) { + List values = new ArrayList<>(); + columnValues.add(values); + } + + int maxValue = BUILD_ROWS_NUMBER / buildRowsRepetition + 40; + int rows = 0; + while (rows < BUILD_ROWS_NUMBER) { + int newRows = Math.min(BUILD_ROWS_NUMBER - rows, ROWS_PER_PAGE); + for 
(int i = 0; i < buildTypes.size(); i++) { + Type type = buildTypes.get(i); + List values = columnValues.get(i); + int initialValue; + if (type instanceof VarcharType) { + initialValue = (rows + 20) % maxValue; + for (int j = 0; j < newRows; j++) { + values.add(initialValue + j); + } + } + else if (type instanceof BigintType) { + initialValue = (rows + 30) % maxValue; + for (int j = 0; j < newRows; j++) { + values.add(initialValue + j); + } + } + else if (type instanceof IntegerType) { + initialValue = (rows + 40) % maxValue; + for (int j = 0; j < newRows; j++) { + values.add(initialValue + j); + } + } + else if (type instanceof DoubleType) { + initialValue = (rows + 50) % maxValue; + for (int j = 0; j < newRows; j++) { + values.add(initialValue + j); + } + } + else if (type instanceof DecimalType) { + initialValue = (rows + 60) % maxValue; + for (int j = 0; j < newRows; j++) { + values.add(initialValue + j); + } + } + } + Page page; + if (isDictionaryBlocks) { + page = PageBuilderUtil.createPageWithDictionaryBlocks(buildTypes, PREFIX, columnValues); + } + else { + page = PageBuilderUtil.createPage(buildTypes, PREFIX, columnValues); + } + buildPages.add(page); + rows += newRows; + + for (int i = 0; i < buildTypes.size(); i++) { + columnValues.get(i).clear(); + } + } + if (buildHashEnabled) { + generateHashPage(buildPages, buildTypes, buildJoinChannels); + buildHashChannel = OptionalInt.of(buildTypes.size()); + } + } + + protected void generateHashPage(List pages, List inputTypes, List hashChannels) + { + for (int i = 0; i < pages.size(); i++) { + Page page = pages.get(i); + pages.set(i, TypeUtils.getHashPage(page, inputTypes, hashChannels)); + } + } + } + + public static class JoinContext + extends BuildContext + { + protected static final int PROBE_ROWS_NUMBER = 1_400_000; + + protected static final Map> PROBE_TYPES = ImmutableMap + .>builder() + .put("group1", ImmutableList.of(BIGINT, BIGINT, BIGINT, createVarcharType(30), createVarcharType(50))) + .put("group2", ImmutableList.of(BIGINT, createVarcharType(10))) + .put("group3", + ImmutableList.of(BIGINT, BIGINT, INTEGER, createVarcharType(50), INTEGER, INTEGER, + createVarcharType(50))) + .put("group4", ImmutableList.of(BIGINT, BIGINT, INTEGER, INTEGER, INTEGER)) + .put("group5", ImmutableList.of(BIGINT, INTEGER)) + .put("group6", ImmutableList.of(createVarcharType(60), BIGINT)) + .put("group7", ImmutableList.of(BIGINT, BIGINT, BIGINT, INTEGER, createVarcharType(50), INTEGER, INTEGER, + createVarcharType(50))) + .put("group8", ImmutableList.of(BIGINT, BIGINT, BIGINT, INTEGER)) + .put("group9", ImmutableList.of(BIGINT, BIGINT)) + .put("group10", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT, BIGINT, BIGINT, INTEGER, INTEGER)) + .put("group11", ImmutableList.of(BIGINT)) + .put("group12", + ImmutableList.of(createVarcharType(50), DOUBLE, createVarcharType(50), BIGINT, + createVarcharType(50), BIGINT, createVarcharType(50), INTEGER, BIGINT)) + .put("group13", ImmutableList.of(createDecimalType(12, 2), BIGINT, BIGINT)) + .put("group14", ImmutableList.of(INTEGER, INTEGER, BIGINT)).build(); + + protected static final Map> PROBE_OUTPUT_COLS = ImmutableMap + .>builder().put("group1", ImmutableList.of(0, 2, 3, 4)) + .put("group2", ImmutableList.of()).put("group3", ImmutableList.of(0, 2, 3, 4, 5, 6)) + .put("group4", ImmutableList.of(0, 2, 3, 4)).put("group5", ImmutableList.of(1)) + .put("group6", ImmutableList.of(0)).put("group7", ImmutableList.of(0, 1, 3, 4, 5, 6, 7)) + .put("group8", ImmutableList.of(0, 2, 3)).put("group9", ImmutableList.of(0)) 
+ .put("group10", ImmutableList.of(0, 1, 2, 3, 4, 6, 7)).put("group11", ImmutableList.of()) + .put("group12", ImmutableList.of(8, 2, 1, 4, 3, 7)).put("group13", ImmutableList.of(0, 2)) + .put("group14", ImmutableList.of(0, 1)).build(); + + protected static final Map> PROBE_HASH_COLS = ImmutableMap + .>builder().put("group1", ImmutableList.of(1)) + .put("group2", ImmutableList.of(0)).put("group3", ImmutableList.of(1)) + .put("group4", ImmutableList.of(1)).put("group5", ImmutableList.of(0)) + .put("group6", ImmutableList.of(1)).put("group7", ImmutableList.of(2)) + .put("group8", ImmutableList.of(1)).put("group9", ImmutableList.of(1)) + .put("group10", ImmutableList.of(5)).put("group11", ImmutableList.of(0)) + .put("group12", ImmutableList.of(2, 4, 6, 0, 5)).put("group13", ImmutableList.of(1)) + .put("group14", ImmutableList.of(2)).build(); + + @Param({"0.1", "1", "2"}) + protected double matchRate = 1; + + protected List probePages = new ArrayList<>(); + protected List probeTypes; + protected List probeOutputChannels; + protected List probeJoinChannels; + protected OptionalInt probeHashChannel; + + private VecAllocator probeVecAllocator; + private DriverContext buildDriverContext; + private Operator buildOperator; + + @Override + protected void beforeSetupTrial() + { + super.beforeSetupTrial(); + probeTypes = PROBE_TYPES.get(testGroup); + probeOutputChannels = PROBE_OUTPUT_COLS.get(testGroup); + probeJoinChannels = PROBE_HASH_COLS.get(testGroup); + probeHashChannel = OptionalInt.empty(); + if (buildHashEnabled) { + probeHashChannel = OptionalInt.of(probeTypes.size()); + } + initializeProbePages(); + } + + @Override + protected void beforeSetupIteration() + { + } + + @Override + protected void beforeCleanupTrial() + { + try { + buildOperator.close(); + } + catch (Exception e) { + e.printStackTrace(); + } + buildDriverContext.finished(); + buildDriverContext.getPipelineContext().getTaskContext().getTaskStateMachine().finished(); + } + + @Override + protected List buildPages() + { + return probePages; + } + + @Override + protected List forkPages(List pages) + { + List slicedPages = new ArrayList<>(pages.size()); + for (Page page : pages) { + slicedPages.add(page.getRegion(0, page.getPositionCount())); + } + return transferToOffHeapPages(probeVecAllocator, slicedPages); + } + + public List getProbeTypes() + { + if (buildHashEnabled) { + return ImmutableList.copyOf(Iterables.concat(probeTypes, ImmutableList.of(BIGINT))); + } + return probeTypes; + } + + protected void initializeProbePages() + { + List> columnValues = new ArrayList<>(); + for (int i = 0; i < probeTypes.size(); i++) { + List values = new ArrayList<>(); + columnValues.add(values); + } + + List initials = new ArrayList<>(); + Random random = new Random(42); + int remainingRows = PROBE_ROWS_NUMBER; + int rowsInPage = 0; + while (remainingRows > 0) { + double roll = random.nextDouble(); + + for (int i = 0; i < probeTypes.size(); i++) { + Type type = probeTypes.get(i); + if (type instanceof VarcharType) { + initials.add(20 + remainingRows); + } + else if (type instanceof BigintType) { + initials.add(30 + remainingRows); + } + else if (type instanceof IntegerType) { + initials.add(40 + remainingRows); + } + else if (type instanceof DoubleType) { + initials.add(50 + remainingRows); + } + else if (type instanceof DecimalType) { + initials.add(60 + remainingRows); + } + } + + int rowsCount = 1; + if (matchRate < 1) { + // each row has matchRate chance to join + if (roll > matchRate) { + // generate not matched row + for (int i = 0; i < 
initials.size(); i++) { + initials.set(i, initials.get(i) * -1); + } + } + } + else if (matchRate > 1) { + // each row has will be repeated between one and 2*matchRate times + roll = roll * 2 * matchRate + 1; + // example for matchRate == 2: + // roll is within [0, 5) range + // rowsCount is within [0, 4] range, where each value has same probability + // so expected rowsCount is 2 + rowsCount = (int) Math.floor(roll); + } + + for (int i = 0; i < rowsCount; i++) { + if (rowsInPage >= ROWS_PER_PAGE) { + Page page; + if (isDictionaryBlocks) { + // create dictionary page + page = PageBuilderUtil.createPageWithDictionaryBlocks(probeTypes, PREFIX, columnValues); + } + else { + page = PageBuilderUtil.createPage(probeTypes, PREFIX, columnValues); + } + probePages.add(page); + rowsInPage = 0; + + for (int j = 0; j < probeTypes.size(); j++) { + columnValues.get(j).clear(); + } + } + + for (int j = 0; j < probeTypes.size(); j++) { + columnValues.get(j).add(initials.get(j)); + } + --remainingRows; + rowsInPage++; + } + initials.clear(); + } + if (buildHashEnabled) { + generateHashPage(probePages, probeTypes, probeJoinChannels); + } + } + + @Override + protected TaskContext createTaskContext() + { + TaskContext testingTaskContext = createTaskContextBySizeInGigaByte(4); + probeVecAllocator = VecAllocatorHelper.createTaskLevelAllocator(testingTaskContext); + return testingTaskContext; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + HashBuilderOmniOperatorFactory hashBuilderOperatorFactory = createBuildOperatorFactory(); + + OperatorFactory operatorFactory = LookupJoinOmniOperators.innerJoin(HASH_JOIN_OPERATOR_ID, TEST_PLAN_NODE_ID, lookupSourceFactoryManager, + getProbeTypes(), probeJoinChannels, probeHashChannel, + Optional.of(probeOutputChannels), OptionalInt.of(1), hashBuilderOperatorFactory); + buildDriverContext = super.createTaskContext().addPipelineContext(0, true, true, false) + .addDriverContext(); + buildOperator = hashBuilderOperatorFactory.createOperator(buildDriverContext); + buildHash(buildOperator, forkBuildPage(buildPages)); + finishBuildHash(lookupSourceFactoryManager); + return operatorFactory; + } + } + + @Benchmark + public JoinBridgeManager benchmarkBuildHash(BuildContext buildContext) + { + buildContext.buildHash(buildContext.createOperator(), buildContext.getRemainInputPages()); + return buildContext.lookupSourceFactoryManager; + } + + @Benchmark + public List benchmarkJoinHash(JoinContext joinContext) + { + return joinContext.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkHashJoinOmniOperators.class.getSimpleName() + ".*").build(); + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkLimitOlkOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkLimitOlkOperator.java new file mode 100644 index 000000000..adc17026f --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkLimitOlkOperator.java @@ -0,0 +1,133 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
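benchmarkJoinHash delegates to doDefaultBenchMark() from the shared AbstractOperatorBenchmarkContext; conceptually that drives the probe operator through the standard openLooKeng Operator protocol, much like buildHash() does for the build side. The following is a rough, assumption-laden sketch of such a drive loop with a hypothetical helper name — not the actual context code, and it omits isBlocked() handling for brevity:

    import io.prestosql.operator.Operator;
    import io.prestosql.spi.Page;

    import java.util.ArrayList;
    import java.util.List;

    final class OperatorDriveSketch
    {
        // Hypothetical helper: push every input page through the operator and collect its output.
        static List<Page> drive(Operator operator, List<Page> inputPages)
        {
            List<Page> outputPages = new ArrayList<>();
            for (Page page : inputPages) {
                if (operator.needsInput()) {
                    operator.addInput(page);
                }
                Page output = operator.getOutput();
                if (output != null) {
                    outputPages.add(output);
                }
            }
            operator.finish();
            // drain whatever the operator still buffers; real drivers also honor isBlocked()
            while (!operator.isFinished()) {
                Page output = operator.getOutput();
                if (output != null) {
                    outputPages.add(output);
                }
            }
            return outputPages;
        }
    }
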
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.operator.LimitOperator; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.spi.Page; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.Type; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOlkOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.DecimalType.createDecimalType; +import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkLimitOlkOperator +{ + public static final int TOTAL_PAGES = 1000; + + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("group1", ImmutableList.of(INTEGER)) + .put("group2", ImmutableList.of(createVarcharType(16))).put("group3", ImmutableList.of(DOUBLE)) + .put("group4", ImmutableList.of(createDecimalType())) + .put("group5", ImmutableList.of(INTEGER, createVarcharType(16))) + .put("group6", ImmutableList.of(INTEGER, BIGINT, createDecimalType(), DOUBLE)) + .put("group7", ImmutableList.of(createVarcharType(20), createVarcharType(30), createVarcharType(50))) + .build(); + + private static final Map> SORT_CHANNELS = ImmutableMap.>builder() + .put("group1", ImmutableList.of(0)).put("group2", ImmutableList.of(0)) + .put("group3", ImmutableList.of(0)).put("group4", ImmutableList.of(0)) + .put("group5", ImmutableList.of(0, 1)).put("group6", ImmutableList.of(0, 1, 2, 3)) + .put("group7", ImmutableList.of(0, 1, 2)).build(); + + @State(Scope.Thread) + public static class BenchmarkContext + extends AbstractOlkOperatorBenchmarkContext + { + @Param({"1", "100", "1000"}) + private String limit = "100"; + + @Param({"group1", "group2", "group3"}) + String testGroup = "group1"; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + @Param({"32", 
"1024"}) + public String rowsPerPageStr = "1024"; + + @Override + protected List buildPages() + { + List typesArray = INPUT_TYPES.get(testGroup); + List pages = new ArrayList<>(TOTAL_PAGES); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(typesArray, + Integer.parseInt(rowsPerPageStr))); + } + else { + pages.add(PageBuilderUtil.createSequencePage(typesArray, Integer.parseInt(rowsPerPageStr))); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + return new LimitOperator.LimitOperatorFactory(0, new PlanNodeId("test"), Long.parseLong(limit)); + } + } + + @Benchmark + public List limit(BenchmarkContext context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkLimitOlkOperator.class.getSimpleName() + ".*").build(); + + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkLimitOmniOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkLimitOmniOperator.java new file mode 100644 index 000000000..6aca9f00e --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkLimitOmniOperator.java @@ -0,0 +1,134 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.spi.Page; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.Type; +import nova.hetu.olk.operator.LimitOmniOperator; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOmniOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.DecimalType.createDecimalType; +import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkLimitOmniOperator +{ + public static final int TOTAL_PAGES = 1000; + + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("group1", ImmutableList.of(INTEGER)) + .put("group2", ImmutableList.of(createVarcharType(16))).put("group3", ImmutableList.of(DOUBLE)) + .put("group4", ImmutableList.of(createDecimalType())) + .put("group5", ImmutableList.of(INTEGER, createVarcharType(16))) + .put("group6", ImmutableList.of(INTEGER, BIGINT, createDecimalType(), DOUBLE)) + .put("group7", ImmutableList.of(createVarcharType(20), createVarcharType(30), createVarcharType(50))) + .build(); + + private static final Map> SORT_CHANNELS = ImmutableMap.>builder() + .put("group1", ImmutableList.of(0)).put("group2", ImmutableList.of(0)) + .put("group3", ImmutableList.of(0)).put("group4", ImmutableList.of(0)) + .put("group5", ImmutableList.of(0, 1)).put("group6", ImmutableList.of(0, 1, 2, 3)) + .put("group7", ImmutableList.of(0, 1, 2)).build(); + + @State(Scope.Thread) + public static class BenchmarkContext + extends AbstractOmniOperatorBenchmarkContext + { + @Param({"1", "100", "1000"}) + private String limit = "100"; + + @Param({"group1", "group2", "group3"}) + String testGroup = "group1"; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + @Param({"32", "1024"}) + public String rowsPerPageStr = "1024"; + + @Override + protected List buildPages() + { + List typesArray = INPUT_TYPES.get(testGroup); + List pages = new ArrayList<>(TOTAL_PAGES); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(typesArray, + Integer.parseInt(rowsPerPageStr))); + } + else { + 
pages.add(PageBuilderUtil.createSequencePage(typesArray, Integer.parseInt(rowsPerPageStr))); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + return new LimitOmniOperator.LimitOmniOperatorFactory(0, new PlanNodeId("test"), + Long.parseLong(limit), INPUT_TYPES.get(testGroup)); + } + } + + @Benchmark + public List limit(BenchmarkContext context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkLimitOmniOperator.class.getSimpleName() + ".*").build(); + + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOlkFilterAndProject.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOlkFilterAndProject.java new file mode 100644 index 000000000..2bc9fc490 --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOlkFilterAndProject.java @@ -0,0 +1,189 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
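PageBuilderUtil.createSequencePage is part of this patch's test utilities; for the fixed-width columns used here its effect is roughly what the stock PageBuilder API produces in the sketch below. The helper and main method are illustrative — the real utility also covers varchar, decimal and dictionary-encoded variants:

    import com.google.common.collect.ImmutableList;
    import io.prestosql.spi.Page;
    import io.prestosql.spi.PageBuilder;
    import io.prestosql.spi.type.Type;

    import java.util.List;

    import static io.prestosql.spi.type.BigintType.BIGINT;
    import static io.prestosql.spi.type.IntegerType.INTEGER;

    public final class SequencePageSketch
    {
        // Roughly: one monotonically increasing value per row in every fixed-width column.
        static Page createSequencePage(List<Type> types, int rowsPerPage)
        {
            PageBuilder pageBuilder = new PageBuilder(types);
            for (int row = 0; row < rowsPerPage; row++) {
                pageBuilder.declarePosition();
                for (int channel = 0; channel < types.size(); channel++) {
                    types.get(channel).writeLong(pageBuilder.getBlockBuilder(channel), row);
                }
            }
            return pageBuilder.build();
        }

        public static void main(String[] args)
        {
            Page page = createSequencePage(ImmutableList.of(INTEGER, BIGINT), 1024);
            System.out.println(page.getPositionCount());
        }
    }
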
+ */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import io.prestosql.SessionTestUtils; +import io.prestosql.metadata.Metadata; +import io.prestosql.operator.DriverYieldSignal; +import io.prestosql.operator.project.PageProcessor; +import io.prestosql.spi.Page; +import io.prestosql.spi.PageBuilder; +import io.prestosql.spi.function.BuiltInFunctionHandle; +import io.prestosql.spi.function.OperatorType; +import io.prestosql.spi.function.Signature; +import io.prestosql.spi.relation.CallExpression; +import io.prestosql.spi.relation.RowExpression; +import io.prestosql.spi.type.Type; +import io.prestosql.sql.gen.ExpressionCompiler; +import io.prestosql.sql.gen.PageFunctionCompiler; +import nova.hetu.olk.tool.BlockUtils; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext; +import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; +import static io.prestosql.spi.function.Signature.internalOperator; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.BooleanType.BOOLEAN; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.sql.relational.Expressions.constant; +import static io.prestosql.sql.relational.Expressions.field; +import static org.openjdk.jmh.annotations.Level.Iteration; +import static org.openjdk.jmh.annotations.Scope.Thread; + +@State(Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkOlkFilterAndProject +{ + private static final int SHIP_DATE = 0; + private static final int EXTENDED_PRICE = 1; + private static final int DISCOUNT = 2; + private static final int QUANTITY = 3; + private static final long CONDITION = 10471; + private static final int PAGE_SIZE = 1024; + private static final int PAGE_COUNT = 1000; + private static final List INPUT_TYPES = ImmutableList.of(INTEGER, BIGINT, BIGINT, BIGINT); + private static final RowExpression FILTER_FOR_Q1_OMNI_FILTER = call( + internalOperator(OperatorType.LESS_THAN_OR_EQUAL, BOOLEAN.getTypeSignature(), INTEGER.getTypeSignature(), + INTEGER.getTypeSignature()), + BOOLEAN, field(SHIP_DATE, BIGINT), constant(CONDITION, BIGINT)); + private static final RowExpression PROJECT = call( + internalOperator(OperatorType.MULTIPLY, BIGINT.getTypeSignature(), BIGINT.getTypeSignature(), + 
BIGINT.getTypeSignature()), + BIGINT, field(EXTENDED_PRICE, BIGINT), field(DISCOUNT, BIGINT)); + private static final Metadata METADATA = createTestMetadataManager(); + private static final PageProcessor COMPILED_PROCESSOR = getCompiledProcessor(); + + @State(Scope.Thread) + public static class Context + { + @Param({"0.2", "0.4", "0.6", "0.8"}) + float selectedRatio = 0.2f; + + private List inputPages; + + private List>> result = new LinkedList<>(); + + @Setup(Iteration) + public void setup() + { + inputPages = createInputPages(PAGE_SIZE, PAGE_COUNT, selectedRatio, CONDITION); + } + + @TearDown(Iteration) + public void cleanup() + { + for (Iterator> pageIterator : result) { + while (pageIterator.hasNext()) { + Optional page = pageIterator.next(); + page.ifPresent(BlockUtils::freePage); + } + } + result = new LinkedList<>(); + } + } + + @Benchmark + public List> compileWithFilterAndProject(Context context) + { + for (Page input : context.inputPages) { + Iterator> iterator = COMPILED_PROCESSOR.process( + SessionTestUtils.TEST_SESSION.toConnectorSession(), new DriverYieldSignal(), + newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()), + input); + context.result.add(iterator); + } + return ImmutableList.of(); + } + + private static PageProcessor getCompiledProcessor() + { + return new ExpressionCompiler(METADATA, new PageFunctionCompiler(METADATA, 10_000)) + .compilePageProcessor(Optional.of(FILTER_FOR_Q1_OMNI_FILTER), ImmutableList.of(PROJECT)).get(); + } + + private static RowExpression call(Signature signature, Type type, RowExpression... arguments) + { + BuiltInFunctionHandle functionHandle = new BuiltInFunctionHandle(signature); + return new CallExpression(signature.getName().getObjectName(), functionHandle, type, + Arrays.asList(arguments)); + } + + private static List createInputPages(int pageSize, int pageCount, float selectedRatio, long condition) + { + List pageContainer = new ArrayList<>(); + for (int i = 0; i < pageCount; i++) { + pageContainer.add(createInputPages(pageSize, selectedRatio, condition)); + } + return pageContainer; + } + + private static Page createInputPages(int pageSize, float selectedRatio, long condition) + { + PageBuilder pageBuilder = new PageBuilder(INPUT_TYPES); + for (int j = 0; j < pageSize; j++) { + pageBuilder.declarePosition(); + BIGINT.writeLong(pageBuilder.getBlockBuilder(EXTENDED_PRICE), j); + BIGINT.writeLong(pageBuilder.getBlockBuilder(DISCOUNT), j); + BIGINT.writeLong(pageBuilder.getBlockBuilder(QUANTITY), j); + } + int selectedCount = (int) (selectedRatio * pageSize); + for (int i = 0; i < selectedCount; i++) { + INTEGER.writeLong(pageBuilder.getBlockBuilder(SHIP_DATE), condition); + } + for (int i = selectedCount; i < pageSize; i++) { + INTEGER.writeLong(pageBuilder.getBlockBuilder(SHIP_DATE), condition + 1); + } + return pageBuilder.build(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkOlkFilterAndProject.class.getSimpleName() + ".*").build(); + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOmniFilterAndProject.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOmniFilterAndProject.java new file mode 100644 index 000000000..0b8347b63 --- /dev/null +++ 
b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOmniFilterAndProject.java @@ -0,0 +1,198 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import io.prestosql.SessionTestUtils; +import io.prestosql.execution.TaskId; +import io.prestosql.metadata.Metadata; +import io.prestosql.operator.DriverYieldSignal; +import io.prestosql.operator.project.PageProcessor; +import io.prestosql.spi.Page; +import io.prestosql.spi.PageBuilder; +import io.prestosql.spi.function.BuiltInFunctionHandle; +import io.prestosql.spi.function.OperatorType; +import io.prestosql.spi.function.Signature; +import io.prestosql.spi.relation.CallExpression; +import io.prestosql.spi.relation.RowExpression; +import io.prestosql.spi.type.Type; +import io.prestosql.sql.gen.PageFunctionCompiler; +import nova.hetu.olk.operator.filterandproject.OmniExpressionCompiler; +import nova.hetu.olk.tool.BlockUtils; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Optional; +import java.util.OptionalInt; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext; +import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; +import static io.prestosql.spi.function.Signature.internalOperator; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.BooleanType.BOOLEAN; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.sql.relational.Expressions.constant; +import static io.prestosql.sql.relational.Expressions.field; +import static org.openjdk.jmh.annotations.Level.Iteration; +import static org.openjdk.jmh.annotations.Scope.Thread; + +@State(Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) 
+public class BenchmarkOmniFilterAndProject +{ + private static final int SHIP_DATE = 0; + private static final int EXTENDED_PRICE = 1; + private static final int DISCOUNT = 2; + private static final int QUANTITY = 3; + private static final long CONDITION = 10471; + private static final int PAGE_SIZE = 1024; + private static final int PAGE_COUNT = 1000; + private static final List INPUT_TYPES = ImmutableList.of(INTEGER, BIGINT, BIGINT, BIGINT); + private static final RowExpression FILTER_FOR_Q1_OMNI_FILTER = call( + internalOperator(OperatorType.LESS_THAN_OR_EQUAL, BOOLEAN.getTypeSignature(), INTEGER.getTypeSignature(), + INTEGER.getTypeSignature()), + BOOLEAN, field(SHIP_DATE, BIGINT), constant(CONDITION, BIGINT)); + private static final RowExpression PROJECT = call( + internalOperator(OperatorType.MULTIPLY, BIGINT.getTypeSignature(), BIGINT.getTypeSignature(), + BIGINT.getTypeSignature()), + BIGINT, field(EXTENDED_PRICE, BIGINT), field(DISCOUNT, BIGINT)); + private static final Metadata METADATA = createTestMetadataManager(); + private static final PageProcessor COMPILED_PROCESSOR = getCompiledProcessor(); + + @State(Scope.Thread) + public static class Context + { + @Param({"0.2", "0.4", "0.6", "0.8"}) + float selectedRatio = 0.2f; + + private List inputPages; + + private List>> result = new LinkedList<>(); + + @Setup(Iteration) + public void setup() + { + inputPages = createInputPages(PAGE_SIZE, PAGE_COUNT, selectedRatio, CONDITION); + } + + @TearDown(Iteration) + public void cleanup() + { + for (Iterator> pageIterator : result) { + while (pageIterator.hasNext()) { + Optional page = pageIterator.next(); + page.ifPresent(BlockUtils::freePage); + } + } + result = new LinkedList<>(); + } + } + + @Benchmark + public List> compileWithFilterAndProject(Context context) + { + for (Page input : context.inputPages) { + Iterator> iterator = COMPILED_PROCESSOR.process( + SessionTestUtils.TEST_SESSION.toConnectorSession(), new DriverYieldSignal(), + newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()), + input); + context.result.add(iterator); + } + return ImmutableList.of(); + } + + private static PageProcessor getCompiledProcessor() + { + List inputTypes = new ArrayList<>(4); + inputTypes.add(INTEGER); + inputTypes.add(BIGINT); + inputTypes.add(BIGINT); + inputTypes.add(BIGINT); + return new OmniExpressionCompiler(METADATA, new PageFunctionCompiler(METADATA, 10_000)) + .compilePageProcessor(Optional.of(FILTER_FOR_Q1_OMNI_FILTER), ImmutableList.of(PROJECT), + Optional.empty(), OptionalInt.empty(), inputTypes, new TaskId("test")) + .get(); + } + + private static RowExpression call(Signature signature, Type type, RowExpression... 
arguments) + { + BuiltInFunctionHandle functionHandle = new BuiltInFunctionHandle(signature); + return new CallExpression(signature.getName().getObjectName(), functionHandle, type, + Arrays.asList(arguments)); + } + + private static List createInputPages(int pageSize, int pageCount, float selectedRatio, long condition) + { + List pageContainer = new ArrayList<>(); + for (int i = 0; i < pageCount; i++) { + pageContainer.add(createInputPages(pageSize, selectedRatio, condition)); + } + return pageContainer; + } + + private static Page createInputPages(int pageSize, float selectedRatio, long condition) + { + PageBuilder pageBuilder = new PageBuilder(INPUT_TYPES); + for (int j = 0; j < pageSize; j++) { + pageBuilder.declarePosition(); + BIGINT.writeLong(pageBuilder.getBlockBuilder(EXTENDED_PRICE), j); + BIGINT.writeLong(pageBuilder.getBlockBuilder(DISCOUNT), j); + BIGINT.writeLong(pageBuilder.getBlockBuilder(QUANTITY), j); + } + int selectedCount = (int) (selectedRatio * pageSize); + for (int i = 0; i < selectedCount; i++) { + INTEGER.writeLong(pageBuilder.getBlockBuilder(SHIP_DATE), condition); + } + for (int i = selectedCount; i < pageSize; i++) { + INTEGER.writeLong(pageBuilder.getBlockBuilder(SHIP_DATE), condition + 1); + } + return pageBuilder.build(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkOmniFilterAndProject.class.getSimpleName() + ".*").build(); + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOrderByOlkOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOrderByOlkOperator.java new file mode 100644 index 000000000..8a8cf77e0 --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOrderByOlkOperator.java @@ -0,0 +1,156 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
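In both filter-and-project benchmarks the selectivity is deterministic: createInputPages writes selectedCount = (int) (selectedRatio * pageSize) rows with SHIP_DATE equal to CONDITION, and those are exactly the rows that survive the SHIP_DATE <= 10471 filter, while the remaining rows get CONDITION + 1 and are dropped. A quick check of that arithmetic with the constants above:

    public final class SelectivitySketch
    {
        public static void main(String[] args)
        {
            int pageSize = 1024;                                    // PAGE_SIZE
            for (float selectedRatio : new float[] {0.2f, 0.4f, 0.6f, 0.8f}) {
                int selectedCount = (int) (selectedRatio * pageSize);
                // e.g. 0.2 -> 204 of 1024 rows pass the filter in every page
                System.out.printf("ratio=%.1f -> %d of %d rows pass%n", selectedRatio, selectedCount, pageSize);
            }
        }
    }
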
+ */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.operator.OrderByOperator.OrderByOperatorFactory; +import io.prestosql.operator.PagesIndex; +import io.prestosql.spi.Page; +import io.prestosql.spi.block.SortOrder; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.Type; +import io.prestosql.sql.gen.OrderingCompiler; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOlkOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.spi.block.SortOrder.ASC_NULLS_FIRST; +import static io.prestosql.spi.block.SortOrder.DESC_NULLS_FIRST; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkOrderByOlkOperator +{ + public static final int TOTAL_PAGES = 100; + public static final int ROWS_PER_PAGE = 10000; + + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("group1", ImmutableList.of(createVarcharType(16))) + .put("group2", ImmutableList.of(INTEGER, INTEGER)) + .put("group3", ImmutableList.of(INTEGER, INTEGER, INTEGER)) + .put("group4", ImmutableList.of(INTEGER, INTEGER)) + .put("group5", ImmutableList.of(createVarcharType(16))) + .put("group6", ImmutableList.of(INTEGER, INTEGER, INTEGER)) + .put("group7", ImmutableList.of(createVarcharType(20), createVarcharType(30), createVarcharType(50))) + .put("group8", ImmutableList.of(createVarcharType(50), INTEGER)) + .put("group9", + ImmutableList.of(INTEGER, createVarcharType(60), createVarcharType(20), createVarcharType(30))) + .put("group10", + ImmutableList.of(INTEGER, createVarcharType(50), INTEGER, INTEGER, createVarcharType(50))) + .build(); + + private static final Map> SORT_CHANNELS = new ImmutableMap.Builder>() + .put("group1", ImmutableList.of(0)).put("group2", ImmutableList.of(0, 1)) + .put("group3", ImmutableList.of(0, 1, 2)).put("group4", ImmutableList.of(0, 1)) + .put("group5", ImmutableList.of(0)).put("group6", ImmutableList.of(0, 1, 2)) + .put("group7", ImmutableList.of(0, 1, 2)).put("group8", ImmutableList.of(0, 1)) + .put("group9", ImmutableList.of(0, 1, 2, 3)).put("group10", ImmutableList.of(0, 1, 2, 3, 4)).build(); + + private static final Map> SORT_ORDERS = new ImmutableMap.Builder>() + .put("group1", ImmutableList.of(ASC_NULLS_FIRST)) + 
.put("group2", ImmutableList.of(ASC_NULLS_FIRST, ASC_NULLS_FIRST)) + .put("group3", ImmutableList.of(ASC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST)) + .put("group4", ImmutableList.of(DESC_NULLS_FIRST, ASC_NULLS_FIRST)) + .put("group5", ImmutableList.of(ASC_NULLS_FIRST)) + .put("group6", ImmutableList.of(ASC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST)) + .put("group7", ImmutableList.of(ASC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST)) + .put("group8", ImmutableList.of(ASC_NULLS_FIRST, ASC_NULLS_FIRST)) + .put("group9", ImmutableList.of(ASC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST)) + .put("group10", ImmutableList.of(DESC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST, + ASC_NULLS_FIRST)) + .build(); + + @State(Scope.Thread) + public static class BenchmarkContext + extends AbstractOlkOperatorBenchmarkContext + { + @Param({"group1", "group2", "group3", "group4", "group5", "group6", "group7", "group8", "group9", "group10"}) + String testGroup = "group1"; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + @Override + protected List buildPages() + { + List typesArray = INPUT_TYPES.get(testGroup); + List pages = new ArrayList<>(); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(typesArray, ROWS_PER_PAGE)); + } + else { + pages.add(PageBuilderUtil.createSequencePage(typesArray, ROWS_PER_PAGE)); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + List sortChannels = SORT_CHANNELS.get(testGroup); + List outputChannels = new ArrayList<>(sortChannels); + List sortOrders = SORT_ORDERS.get(testGroup); + List totalChannels = INPUT_TYPES.get(testGroup); + return new OrderByOperatorFactory(0, new PlanNodeId("test"), totalChannels, + outputChannels, ROWS_PER_PAGE, sortChannels, sortOrders, new PagesIndex.TestingFactory(false), + false, Optional.empty(), new OrderingCompiler(), false); + } + } + + @Benchmark + public List orderBy(BenchmarkContext context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkOrderByOlkOperator.class.getSimpleName() + ".*").build(); + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOrderByOmniOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOrderByOmniOperator.java new file mode 100644 index 000000000..067147eda --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkOrderByOmniOperator.java @@ -0,0 +1,151 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.spi.Page; +import io.prestosql.spi.block.SortOrder; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.Type; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOmniOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.spi.block.SortOrder.ASC_NULLS_FIRST; +import static io.prestosql.spi.block.SortOrder.DESC_NULLS_FIRST; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; +import static nova.hetu.olk.operator.OrderByOmniOperator.OrderByOmniOperatorFactory.createOrderByOmniOperatorFactory; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkOrderByOmniOperator +{ + public static final int TOTAL_PAGES = 100; + public static final int ROWS_PER_PAGE = 10000; + + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("group1", ImmutableList.of(createVarcharType(16))) + .put("group2", ImmutableList.of(INTEGER, INTEGER)) + .put("group3", ImmutableList.of(INTEGER, INTEGER, INTEGER)) + .put("group4", ImmutableList.of(INTEGER, INTEGER)) + .put("group5", ImmutableList.of(createVarcharType(16))) + .put("group6", ImmutableList.of(INTEGER, INTEGER, INTEGER)) + .put("group7", ImmutableList.of(createVarcharType(20), createVarcharType(30), createVarcharType(50))) + .put("group8", ImmutableList.of(createVarcharType(50), INTEGER)) + .put("group9", + ImmutableList.of(INTEGER, createVarcharType(60), createVarcharType(20), createVarcharType(30))) + .put("group10", + ImmutableList.of(INTEGER, createVarcharType(50), INTEGER, INTEGER, createVarcharType(50))) + .build(); + + private static final Map> SORT_CHANNELS = new ImmutableMap.Builder>() + .put("group1", ImmutableList.of(0)).put("group2", ImmutableList.of(0, 1)) + .put("group3", ImmutableList.of(0, 1, 2)).put("group4", ImmutableList.of(0, 1)) + .put("group5", ImmutableList.of(0)).put("group6", ImmutableList.of(0, 1, 2)) + .put("group7", ImmutableList.of(0, 1, 2)).put("group8", ImmutableList.of(0, 1)) + .put("group9", ImmutableList.of(0, 1, 2, 3)).put("group10", ImmutableList.of(0, 1, 2, 3, 4)).build(); + + private static final Map> SORT_ORDERS = new ImmutableMap.Builder>() + .put("group1", ImmutableList.of(ASC_NULLS_FIRST)) + .put("group2", ImmutableList.of(ASC_NULLS_FIRST, ASC_NULLS_FIRST)) + 
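// same sort-order table as BenchmarkOrderByOlkOperator, so the Olk and Omni operators are measured on an identical workload +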
.put("group3", ImmutableList.of(ASC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST)) + .put("group4", ImmutableList.of(DESC_NULLS_FIRST, ASC_NULLS_FIRST)) + .put("group5", ImmutableList.of(ASC_NULLS_FIRST)) + .put("group6", ImmutableList.of(ASC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST)) + .put("group7", ImmutableList.of(ASC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST)) + .put("group8", ImmutableList.of(ASC_NULLS_FIRST, ASC_NULLS_FIRST)) + .put("group9", ImmutableList.of(ASC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST)) + .put("group10", ImmutableList.of(DESC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST, ASC_NULLS_FIRST, + ASC_NULLS_FIRST)) + .build(); + + @State(Scope.Thread) + public static class BenchmarkContext + extends AbstractOmniOperatorBenchmarkContext + { + @Param({"group1", "group2", "group3", "group4", "group5", "group6", "group7", "group8", "group9", "group10"}) + String testGroup = "group1"; + @Param({"false", "true"}) + boolean dictionaryBlocks; + + @Override + protected List buildPages() + { + List typesArray = INPUT_TYPES.get(testGroup); + List pages = new ArrayList<>(); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(typesArray, ROWS_PER_PAGE)); + } + else { + pages.add(PageBuilderUtil.createSequencePage(typesArray, ROWS_PER_PAGE)); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + List totalChannels = INPUT_TYPES.get(testGroup); + List sortChannels = SORT_CHANNELS.get(testGroup); + List outputChannels = new ArrayList<>(sortChannels); + List sortOrders = SORT_ORDERS.get(testGroup); + return createOrderByOmniOperatorFactory(0, + new PlanNodeId("test"), totalChannels, outputChannels, sortChannels, sortOrders); + } + } + + @Benchmark + public List orderBy(BenchmarkContext context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkOrderByOmniOperator.class.getSimpleName() + ".*").build(); + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkRunner.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkRunner.java new file mode 100644 index 000000000..e3f16b7e7 --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkRunner.java @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package nova.hetu.olk.operator.benchmark; + +import org.apache.commons.io.FileUtils; +import org.openjdk.jmh.results.format.ResultFormatType; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.io.File; + +public class BenchmarkRunner +{ + private BenchmarkRunner() + { + } + + public static void main(String[] args) throws Exception + { + runBenchmark(args[0]); + } + + public static void runBenchmark(Class benchmarkClass) throws Exception + { + runBenchmark(benchmarkClass.getSimpleName()); + } + + public static void runBenchmark(String benchmarkClassName) throws Exception + { + FileUtils.forceMkdir(new File("benchmark-result")); + String benchmarkName = benchmarkClassName.replaceAll("\\.java", ""); + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + benchmarkName + ".*") + .shouldDoGC(true) + .resultFormat(ResultFormatType.CSV) + .result(System.getProperty("user.dir") + "/benchmark-result/" + benchmarkName + ".csv") + .forks(0) + .build(); + new Runner(options).run(); + System.exit(0); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkTopNOlkOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkTopNOlkOperator.java new file mode 100644 index 000000000..571f56ea6 --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkTopNOlkOperator.java @@ -0,0 +1,156 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.operator.TopNOperator; +import io.prestosql.spi.Page; +import io.prestosql.spi.block.SortOrder; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.Type; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOlkOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkTopNOlkOperator +{ + public static final int TOTAL_PAGES = 1000; + + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("group1", ImmutableList.of(createVarcharType(16))) + .put("group2", ImmutableList.of(INTEGER, INTEGER)) + .put("group3", ImmutableList.of(INTEGER, INTEGER, INTEGER)) + .put("group4", ImmutableList.of(BIGINT, INTEGER)).put("group5", ImmutableList.of(createVarcharType(16))) + .put("group6", ImmutableList.of(INTEGER, INTEGER, INTEGER)) + .put("group7", ImmutableList.of(createVarcharType(20), createVarcharType(30), createVarcharType(50))) + .put("group8", ImmutableList.of(createVarcharType(50), INTEGER)) + .put("group9", + ImmutableList.of(INTEGER, createVarcharType(60), createVarcharType(20), createVarcharType(30))) + .put("group10", + ImmutableList.of(INTEGER, createVarcharType(50), INTEGER, INTEGER, createVarcharType(50))) + .build(); + + private static final Map> SORT_CHANNELS = ImmutableMap.>builder() + .put("group1", ImmutableList.of(0)).put("group2", ImmutableList.of(0, 1)) + .put("group3", ImmutableList.of(0, 1, 2)).put("group4", ImmutableList.of(0, 1)) + .put("group5", ImmutableList.of(0)).put("group6", ImmutableList.of(0, 1, 2)) + .put("group7", ImmutableList.of(0, 1, 2)).put("group8", ImmutableList.of(0, 1)) + .put("group9", ImmutableList.of(0, 1, 2, 3)).put("group10", ImmutableList.of(0, 1, 2, 3)).build(); + + @State(Scope.Thread) + public static class BenchmarkContext + extends AbstractOlkOperatorBenchmarkContext + { + @Param({"1", "10", "100", "1000", "10000"}) + private String topN = "100"; + + @Param({"group1", "group2", "group3", "group4", "group5", "group6", "group7", "group8", "group9", "group10"}) + String testGroup = "group1"; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + @Param({"32", 
"1024"}) + public String rowsPerPageStr = "1024"; + + private TopNOperator.TopNOperatorFactory createFactoryUnbounded(List sourceTypes, + List sortChannels, List sortOrder) + { + return new TopNOperator.TopNOperatorFactory(0, new PlanNodeId("test"), sourceTypes, Integer.valueOf(topN), + sortChannels, sortOrder); + } + + @Override + protected List buildPages() + { + List typesArray = INPUT_TYPES.get(testGroup); + List pages = new ArrayList<>(); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(typesArray, + Integer.parseInt(rowsPerPageStr))); + } + else { + pages.add(PageBuilderUtil.createSequencePage(typesArray, Integer.parseInt(rowsPerPageStr))); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + List inputTypes = INPUT_TYPES.get(testGroup); + List sortChannels = SORT_CHANNELS.get(testGroup); + List sortOrders = new ArrayList<>(); + for (int i = 0; i < sortChannels.size(); i++) { + sortOrders.add(SortOrder.ASC_NULLS_LAST); + } + //Ungrouped + return createFactoryUnbounded(inputTypes, sortChannels, sortOrders); + } + } + + @Benchmark + public List topN(BenchmarkContext context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + BenchmarkContext data = new BenchmarkContext(); + data.setup(); + new BenchmarkTopNOlkOperator().topN(data); + + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkTopNOlkOperator.class.getSimpleName() + ".*").build(); + + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkTopNOmniOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkTopNOmniOperator.java new file mode 100644 index 000000000..4b2d813d3 --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkTopNOmniOperator.java @@ -0,0 +1,150 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.spi.Page; +import io.prestosql.spi.block.SortOrder; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.Type; +import nova.hetu.olk.operator.TopNOmniOperator; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOmniOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkTopNOmniOperator +{ + public static final int TOTAL_PAGES = 1000; + + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("group1", ImmutableList.of(createVarcharType(16))) + .put("group2", ImmutableList.of(INTEGER, INTEGER)) + .put("group3", ImmutableList.of(INTEGER, INTEGER, INTEGER)) + .put("group4", ImmutableList.of(BIGINT, INTEGER)).put("group5", ImmutableList.of(createVarcharType(16))) + .put("group6", ImmutableList.of(INTEGER, INTEGER, INTEGER)) + .put("group7", ImmutableList.of(createVarcharType(20), createVarcharType(30), createVarcharType(50))) + .put("group8", ImmutableList.of(createVarcharType(50), INTEGER)) + .put("group9", + ImmutableList.of(INTEGER, createVarcharType(60), createVarcharType(20), createVarcharType(30))) + .put("group10", + ImmutableList.of(INTEGER, createVarcharType(50), INTEGER, INTEGER, createVarcharType(50))) + .build(); + + private static final Map> SORT_CHANNELS = ImmutableMap.>builder() + .put("group1", ImmutableList.of(0)).put("group2", ImmutableList.of(0, 1)) + .put("group3", ImmutableList.of(0, 1, 2)).put("group4", ImmutableList.of(0, 1)) + .put("group5", ImmutableList.of(0)).put("group6", ImmutableList.of(0, 1, 2)) + .put("group7", ImmutableList.of(0, 1, 2)).put("group8", ImmutableList.of(0, 1)) + .put("group9", ImmutableList.of(0, 1, 2, 3)).put("group10", ImmutableList.of(0, 1, 2, 3)).build(); + + @State(Scope.Thread) + public static class BenchmarkContext + extends AbstractOmniOperatorBenchmarkContext + { + @Param({"1", "10", "100", "1000", "10000"}) + private String topN = "100"; + + @Param({"group1", "group2", "group3", "group4", "group5", "group6", "group7", "group8", "group9", "group10"}) + String testGroup = "group1"; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + 
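// rows per page is declared as a String so JMH prints the value verbatim in results; buildPages() parses it with Integer.parseInt +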
@Param({"32", "1024"}) + public String rowsPerPageStr = "1024"; + + @Override + protected List buildPages() + { + List typesArray = INPUT_TYPES.get(testGroup); + List pages = new ArrayList<>(); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(typesArray, + Integer.parseInt(rowsPerPageStr))); + } + else { + pages.add(PageBuilderUtil.createSequencePage(typesArray, Integer.parseInt(rowsPerPageStr))); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + List inputTypes = INPUT_TYPES.get(testGroup); + List sortChannels = SORT_CHANNELS.get(testGroup); + List sortOrders = new ArrayList<>(); + for (int i = 0; i < sortChannels.size(); i++) { + sortOrders.add(SortOrder.ASC_NULLS_LAST); + } + //Ungrouped + return new TopNOmniOperator.TopNOmniOperatorFactory(0, new PlanNodeId("test"), inputTypes, + Integer.parseInt(topN), sortChannels, sortOrders); + } + } + + @Benchmark + public List topN(BenchmarkContext context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + BenchmarkContext data = new BenchmarkContext(); + data.setup(); + new BenchmarkTopNOmniOperator().topN(data); + + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkTopNOmniOperator.class.getSimpleName() + ".*").build(); + + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkWindowOlkOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkWindowOlkOperator.java new file mode 100644 index 000000000..4ad2f4659 --- /dev/null +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkWindowOlkOperator.java @@ -0,0 +1,254 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.primitives.Ints; +import io.prestosql.metadata.Metadata; +import io.prestosql.operator.DummySpillerFactory; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.operator.WindowFunctionDefinition; +import io.prestosql.operator.window.AggregateWindowFunction; +import io.prestosql.operator.window.FrameInfo; +import io.prestosql.operator.window.RankFunction; +import io.prestosql.operator.window.ReflectionWindowFunctionSupplier; +import io.prestosql.spi.Page; +import io.prestosql.spi.block.SortOrder; +import io.prestosql.spi.connector.QualifiedObjectName; +import io.prestosql.spi.function.Signature; +import io.prestosql.spi.type.Type; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOlkOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; +import static io.prestosql.operator.TestWindowOperator.ROW_NUMBER; +import static io.prestosql.operator.TestWindowOperator.createFactoryUnbounded; +import static io.prestosql.operator.WindowFunctionDefinition.window; +import static io.prestosql.spi.function.FunctionKind.AGGREGATE; +import static io.prestosql.spi.sql.expression.Types.FrameBoundType.UNBOUNDED_FOLLOWING; +import static io.prestosql.spi.sql.expression.Types.FrameBoundType.UNBOUNDED_PRECEDING; +import static io.prestosql.spi.sql.expression.Types.WindowFrameType.RANGE; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; +import static org.openjdk.jmh.annotations.Scope.Thread; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkWindowOlkOperator +{ + public static final int TOTAL_PAGES = 1; + public static final int ROWS_PER_PAGE = 10000; + + public static final int NUMBER_OF_GROUP_COLUMNS = 2; + + private static final Metadata metadata = createTestMetadataManager(); + private static final FrameInfo UNBOUNDED_FRAME = new FrameInfo(RANGE, UNBOUNDED_PRECEDING, Optional.empty(), + UNBOUNDED_FOLLOWING, Optional.empty()); + + public static final List RANK = ImmutableList + .of(window(new ReflectionWindowFunctionSupplier<>("rank", BIGINT, ImmutableList.of(), RankFunction.class), BIGINT, UNBOUNDED_FRAME)); + + public 
static final List COUNT_BIGINT_GROUP2 = ImmutableList.of(window( + AggregateWindowFunction.supplier( + new Signature(QualifiedObjectName.valueOfDefaultFunction("count"), AGGREGATE, + BIGINT.getTypeSignature(), BIGINT.getTypeSignature()), + metadata.getFunctionAndTypeManager().getAggregateFunctionImplementation( + new Signature(QualifiedObjectName.valueOfDefaultFunction("count"), AGGREGATE, + BIGINT.getTypeSignature(), BIGINT.getTypeSignature()))), + BIGINT, UNBOUNDED_FRAME, 2)); + public static final List AVG_BIGINT_GROUP3 = ImmutableList + .of(window( + AggregateWindowFunction.supplier( + new Signature(QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()), + metadata.getFunctionAndTypeManager() + .getAggregateFunctionImplementation(new Signature( + QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()))), + BIGINT, UNBOUNDED_FRAME, 7)); + public static final List AVG_BIGINT_GROUP5 = ImmutableList + .of(window( + AggregateWindowFunction.supplier( + new Signature(QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()), + metadata.getFunctionAndTypeManager() + .getAggregateFunctionImplementation(new Signature( + QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()))), + BIGINT, UNBOUNDED_FRAME, 1)); + public static final List AVG_BIGINT_GROUP6 = ImmutableList + .of(window( + AggregateWindowFunction.supplier( + new Signature(QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()), + metadata.getFunctionAndTypeManager() + .getAggregateFunctionImplementation(new Signature( + QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()))), + BIGINT, UNBOUNDED_FRAME, 1)); + public static final List AVG_BIGINT_GROUP7 = ImmutableList + .of(window( + AggregateWindowFunction.supplier( + new Signature(QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()), + metadata.getFunctionAndTypeManager() + .getAggregateFunctionImplementation(new Signature( + QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()))), + BIGINT, UNBOUNDED_FRAME, 6)); + + private static final Map> PARTITION_CHANNELS = ImmutableMap + .>builder().put("group1", ImmutableList.of(0, 1)) + .put("group2", ImmutableList.of(0, 1, 2)).put("group3", ImmutableList.of(0, 1, 2, 3, 4)) + .put("group4", ImmutableList.of(0, 1, 2, 3)).put("group5", ImmutableList.of(1)) + .put("group6", ImmutableList.of(1)).put("group7", ImmutableList.of(0, 2, 3, 4)).build(); + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("group1", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT)) + .put("group2", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT)) + .put("group3", + ImmutableList.of(createVarcharType(50), createVarcharType(50), createVarcharType(50), + createVarcharType(50), INTEGER, INTEGER, BIGINT, BIGINT)) + .put("group4", + ImmutableList.of(createVarcharType(50), createVarcharType(50), createVarcharType(50), + createVarcharType(50), INTEGER, INTEGER, BIGINT)) + .put("group5", ImmutableList.of(BIGINT, INTEGER)).put("group6", ImmutableList.of(BIGINT, INTEGER)) + .put("group7", ImmutableList.of(createVarcharType(50), createVarcharType(50), 
createVarcharType(50), + createVarcharType(50), createVarcharType(50), INTEGER, BIGINT)) + .build(); + + private static final Map> WINDOW_TYPES = ImmutableMap + .>builder().put("group1", ROW_NUMBER) + .put("group2", COUNT_BIGINT_GROUP2).put("group3", AVG_BIGINT_GROUP3).put("group4", RANK) + .put("group5", AVG_BIGINT_GROUP5).put("group6", AVG_BIGINT_GROUP6).put("group7", AVG_BIGINT_GROUP7) + .build(); + private static final Map> SORT_CHANNELS = ImmutableMap.>builder() + .put("group1", ImmutableList.of(3)).put("group2", ImmutableList.of(3)).put("group3", ImmutableList.of()) + .put("group4", ImmutableList.of(4, 5)).put("group5", ImmutableList.of()) + .put("group6", ImmutableList.of()).put("group7", ImmutableList.of()).build(); + + @State(Thread) + public static class Context + extends AbstractOlkOperatorBenchmarkContext + { + @Param({"group1", "group2", "group3", "group4", "group5", "group6", "group7"}) + String testGroup; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + public int rowsPerPartition; + + @Param("0") + public int numberOfPregroupedColumns; + + public int partitionsPerGroup; + + @Override + protected List buildPages() + { + List typesArray = INPUT_TYPES.get(testGroup); + List pages = new ArrayList<>(); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(typesArray, ROWS_PER_PAGE)); + } + else { + pages.add(PageBuilderUtil.createSequencePage(typesArray, ROWS_PER_PAGE)); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + List partitionChannels = PARTITION_CHANNELS.get(testGroup); + List windowType = WINDOW_TYPES.get(testGroup); + List inputTypes = INPUT_TYPES.get(testGroup); + List sortChannels = SORT_CHANNELS.get(testGroup); + List outputChannels = new ArrayList<>(); + List sortOrders = new ArrayList<>(); + + for (int i = 0; i < inputTypes.size(); i++) { + outputChannels.add(i); + } + + for (int i = 0; i < sortChannels.size(); i++) { + sortOrders.add(SortOrder.ASC_NULLS_LAST); + } + + if (numberOfPregroupedColumns == 0) { + //Ungrouped + return createFactoryUnbounded(inputTypes, outputChannels, windowType, partitionChannels, + Ints.asList(), sortChannels, sortOrders, 0, new DummySpillerFactory(), false); + } + else if (numberOfPregroupedColumns < NUMBER_OF_GROUP_COLUMNS) { + //Partially grouped + return createFactoryUnbounded(inputTypes, outputChannels, windowType, partitionChannels, + Ints.asList(1), sortChannels, sortOrders, 0, new DummySpillerFactory(), false); + } + else { + // Fully grouped and (potentially) sorted + return createFactoryUnbounded(inputTypes, outputChannels, windowType, partitionChannels, + Ints.asList(0, 1), sortChannels, sortOrders, + (numberOfPregroupedColumns - NUMBER_OF_GROUP_COLUMNS), new DummySpillerFactory(), false); + } + } + } + + @Benchmark + public List benchmark(Context context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkWindowOlkOperator.class.getSimpleName() + ".*").build(); + + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkWindowOmniOperator.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkWindowOmniOperator.java new file mode 100644 index 000000000..6fac66014 --- /dev/null +++ 
b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BenchmarkWindowOmniOperator.java @@ -0,0 +1,265 @@ +/* + * Copyright (C) 2020-2022. Huawei Technologies Co., Ltd. All rights reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package nova.hetu.olk.operator.benchmark; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.primitives.Ints; +import io.prestosql.metadata.Metadata; +import io.prestosql.operator.DummySpillerFactory; +import io.prestosql.operator.OperatorFactory; +import io.prestosql.operator.WindowFunctionDefinition; +import io.prestosql.operator.window.AggregateWindowFunction; +import io.prestosql.operator.window.FrameInfo; +import io.prestosql.operator.window.RankFunction; +import io.prestosql.operator.window.ReflectionWindowFunctionSupplier; +import io.prestosql.spi.Page; +import io.prestosql.spi.block.SortOrder; +import io.prestosql.spi.connector.QualifiedObjectName; +import io.prestosql.spi.function.Signature; +import io.prestosql.spi.plan.PlanNodeId; +import io.prestosql.spi.type.Type; +import io.prestosql.spiller.SpillerFactory; +import nova.hetu.olk.operator.WindowOmniOperator; +import nova.hetu.olk.operator.benchmark.AbstractOperatorBenchmarkContext.AbstractOmniOperatorBenchmarkContext; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Threads; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.RunnerException; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; +import org.openjdk.jmh.runner.options.VerboseMode; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; + +import static io.prestosql.metadata.MetadataManager.createTestMetadataManager; +import static io.prestosql.operator.TestWindowOperator.ROW_NUMBER; +import static io.prestosql.operator.WindowFunctionDefinition.window; +import static io.prestosql.spi.function.FunctionKind.AGGREGATE; +import static io.prestosql.spi.sql.expression.Types.FrameBoundType.UNBOUNDED_FOLLOWING; +import static io.prestosql.spi.sql.expression.Types.FrameBoundType.UNBOUNDED_PRECEDING; +import static io.prestosql.spi.sql.expression.Types.WindowFrameType.RANGE; +import static io.prestosql.spi.type.BigintType.BIGINT; +import static io.prestosql.spi.type.DoubleType.DOUBLE; +import static io.prestosql.spi.type.IntegerType.INTEGER; +import static io.prestosql.spi.type.VarcharType.createVarcharType; +import static 
org.openjdk.jmh.annotations.Scope.Thread; + +@State(Scope.Thread) +@Fork(0) +@Threads(1) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@Warmup(iterations = 3, time = 1) +@Measurement(iterations = 5, time = 1) +public class BenchmarkWindowOmniOperator +{ + public static final int TOTAL_PAGES = 1; + public static final int ROWS_PER_PAGE = 10000; + + public static final int NUMBER_OF_GROUP_COLUMNS = 2; + + private static final Metadata metadata = createTestMetadataManager(); + private static final FrameInfo UNBOUNDED_FRAME = new FrameInfo(RANGE, UNBOUNDED_PRECEDING, Optional.empty(), + UNBOUNDED_FOLLOWING, Optional.empty()); + + public static final List RANK = ImmutableList + .of(window(new ReflectionWindowFunctionSupplier<>("rank", BIGINT, ImmutableList.of(), RankFunction.class), BIGINT, UNBOUNDED_FRAME)); + public static final List COUNT_BIGINT_GROUP2 = ImmutableList.of(window( + AggregateWindowFunction.supplier( + new Signature(QualifiedObjectName.valueOfDefaultFunction("count"), AGGREGATE, + BIGINT.getTypeSignature(), BIGINT.getTypeSignature()), + metadata.getFunctionAndTypeManager().getAggregateFunctionImplementation( + new Signature(QualifiedObjectName.valueOfDefaultFunction("count"), AGGREGATE, + BIGINT.getTypeSignature(), BIGINT.getTypeSignature()))), + BIGINT, UNBOUNDED_FRAME, 2)); + public static final List AVG_BIGINT_GROUP3 = ImmutableList + .of(window( + AggregateWindowFunction.supplier( + new Signature(QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()), + metadata.getFunctionAndTypeManager() + .getAggregateFunctionImplementation(new Signature( + QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()))), + BIGINT, UNBOUNDED_FRAME, 7)); + public static final List AVG_BIGINT_GROUP5 = ImmutableList + .of(window( + AggregateWindowFunction.supplier( + new Signature(QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()), + metadata.getFunctionAndTypeManager() + .getAggregateFunctionImplementation(new Signature( + QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()))), + BIGINT, UNBOUNDED_FRAME, 1)); + public static final List AVG_BIGINT_GROUP6 = ImmutableList + .of(window( + AggregateWindowFunction.supplier( + new Signature(QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()), + metadata.getFunctionAndTypeManager() + .getAggregateFunctionImplementation(new Signature( + QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()))), + BIGINT, UNBOUNDED_FRAME, 1)); + public static final List AVG_BIGINT_GROUP7 = ImmutableList + .of(window( + AggregateWindowFunction.supplier( + new Signature(QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()), + metadata.getFunctionAndTypeManager() + .getAggregateFunctionImplementation(new Signature( + QualifiedObjectName.valueOfDefaultFunction("avg"), AGGREGATE, + DOUBLE.getTypeSignature(), BIGINT.getTypeSignature()))), + BIGINT, UNBOUNDED_FRAME, 6)); + + private static final Map> PARTITION_CHANNELS = ImmutableMap + .>builder().put("group1", ImmutableList.of(0, 1)) + .put("group2", ImmutableList.of(0, 1, 2)).put("group3", ImmutableList.of(0, 1, 2, 3, 4)) + .put("group4", ImmutableList.of(0, 
1, 2, 3)).put("group5", ImmutableList.of(1)) + .put("group6", ImmutableList.of(1)).put("group7", ImmutableList.of(0, 2, 3, 4)).build(); + private static final Map> INPUT_TYPES = ImmutableMap + .>builder().put("group1", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT)) + .put("group2", ImmutableList.of(BIGINT, BIGINT, BIGINT, BIGINT)) + .put("group3", + ImmutableList.of(createVarcharType(50), createVarcharType(50), createVarcharType(50), + createVarcharType(50), INTEGER, INTEGER, BIGINT, BIGINT)) + .put("group4", + ImmutableList.of(createVarcharType(50), createVarcharType(50), createVarcharType(50), + createVarcharType(50), INTEGER, INTEGER, BIGINT)) + .put("group5", ImmutableList.of(BIGINT, INTEGER)).put("group6", ImmutableList.of(BIGINT, INTEGER)) + .put("group7", ImmutableList.of(createVarcharType(50), createVarcharType(50), createVarcharType(50), + createVarcharType(50), createVarcharType(50), INTEGER, BIGINT)) + .build(); + private static final Map> WINDOW_TYPES = ImmutableMap + .>builder().put("group1", ROW_NUMBER) + .put("group2", COUNT_BIGINT_GROUP2).put("group3", AVG_BIGINT_GROUP3).put("group4", RANK) + .put("group5", AVG_BIGINT_GROUP5).put("group6", AVG_BIGINT_GROUP6).put("group7", AVG_BIGINT_GROUP7) + .build(); + private static final Map> SORT_CHANNELS = ImmutableMap.>builder() + .put("group1", ImmutableList.of(3)).put("group2", ImmutableList.of(3)).put("group3", ImmutableList.of()) + .put("group4", ImmutableList.of(4, 5)).put("group5", ImmutableList.of()) + .put("group6", ImmutableList.of()).put("group7", ImmutableList.of()).build(); + + @State(Thread) + public static class Context + extends AbstractOmniOperatorBenchmarkContext + { + @Param({"group1", "group2", "group3", "group4", "group5", "group6", "group7"}) + String testGroup; + + @Param({"false", "true"}) + boolean dictionaryBlocks; + + public int rowsPerPartition; + + @Param("0") + public int numberOfPregroupedColumns; + + public int partitionsPerGroup; + + public static WindowOmniOperator.WindowOmniOperatorFactory createFactoryUnbounded( + List sourceTypes, List outputChannels, + List functions, List partitionChannels, + List preGroupedChannels, List sortChannels, List sortOrder, + int preSortedChannelPrefix, SpillerFactory spillerFactory, boolean spillEnabled) + { + return new WindowOmniOperator.WindowOmniOperatorFactory(0, new PlanNodeId("test"), sourceTypes, + outputChannels, functions, partitionChannels, preGroupedChannels, sortChannels, sortOrder, + preSortedChannelPrefix, 10); + } + + @Override + protected List buildPages() + { + List typesArray = INPUT_TYPES.get(testGroup); + List pages = new ArrayList<>(); + for (int i = 0; i < TOTAL_PAGES; i++) { + if (dictionaryBlocks) { + pages.add(PageBuilderUtil.createSequencePageWithDictionaryBlocks(typesArray, ROWS_PER_PAGE)); + } + else { + pages.add(PageBuilderUtil.createSequencePage(typesArray, ROWS_PER_PAGE)); + } + } + return pages; + } + + @Override + protected OperatorFactory createOperatorFactory() + { + List partitionChannels = PARTITION_CHANNELS.get(testGroup); + List windowType = WINDOW_TYPES.get(testGroup); + List inputTypes = INPUT_TYPES.get(testGroup); + List sortChannels = SORT_CHANNELS.get(testGroup); + List outputChannels = new ArrayList<>(); + List sortOrders = new ArrayList<>(); + + for (int i = 0; i < inputTypes.size(); i++) { + outputChannels.add(i); + } + + for (int i = 0; i < sortChannels.size(); i++) { + sortOrders.add(SortOrder.ASC_NULLS_LAST); + } + + if (numberOfPregroupedColumns == 0) { + //Ungrouped + return createFactoryUnbounded(inputTypes, 
outputChannels, windowType, partitionChannels, + Ints.asList(), sortChannels, sortOrders, 0, new DummySpillerFactory(), false); + } + else if (numberOfPregroupedColumns < NUMBER_OF_GROUP_COLUMNS) { + //Partially grouped + return createFactoryUnbounded(inputTypes, outputChannels, windowType, partitionChannels, + Ints.asList(1), sortChannels, sortOrders, 0, new DummySpillerFactory(), false); + } + else { + // Fully grouped and (potentially) sorted + return createFactoryUnbounded(inputTypes, outputChannels, windowType, partitionChannels, + Ints.asList(0, 1), sortChannels, sortOrders, + (numberOfPregroupedColumns - NUMBER_OF_GROUP_COLUMNS), new DummySpillerFactory(), false); + } + } + } + + @Benchmark + public List benchmark(Context context) + { + return context.doDefaultBenchMark(); + } + + public static void main(String[] args) throws RunnerException + { + Options options = new OptionsBuilder().verbosity(VerboseMode.NORMAL) + .include(".*" + BenchmarkWindowOmniOperator.class.getSimpleName() + ".*").build(); + + new Runner(options).run(); + } +} diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BlockUtil.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BlockUtil.java index 6761b679e..bb5046905 100644 --- a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BlockUtil.java +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/BlockUtil.java @@ -19,6 +19,7 @@ import io.airlift.slice.Slice; import io.prestosql.spi.block.Block; import io.prestosql.spi.block.BlockBuilder; import io.prestosql.spi.block.DictionaryBlock; +import io.prestosql.spi.type.CharType; import io.prestosql.spi.type.DecimalType; import io.prestosql.spi.type.VarcharType; @@ -54,6 +55,17 @@ public final class BlockUtil return builder.build(); } + public static Block createStringSequenceBlock(int start, int end, CharType type) + { + BlockBuilder builder = type.createBlockBuilder(null, 100); + + for (int i = start; i < end; i++) { + type.writeString(builder, String.valueOf(i)); + } + + return builder.build(); + } + public static Block createIntegerSequenceBlock(int start, int end) { BlockBuilder builder = INTEGER.createFixedSizeBlockBuilder(end - start); @@ -81,6 +93,22 @@ public final class BlockUtil return new DictionaryBlock(builder.build(), ids); } + public static Block createStringDictionaryBlock(int start, int length, CharType type) + { + checkArgument(length > 5, "block must have more than 5 entries"); + + int dictionarySize = length / 5; + BlockBuilder builder = type.createBlockBuilder(null, dictionarySize); + for (int i = start; i < start + dictionarySize; i++) { + type.writeString(builder, String.valueOf(i)); + } + int[] ids = new int[length]; + for (int i = 0; i < length; i++) { + ids[i] = i % dictionarySize; + } + return new DictionaryBlock(builder.build(), ids); + } + public static Block createLongDictionaryBlock(int start, int length) { checkArgument(length > 5, "block must have more than 5 entries"); @@ -120,7 +148,7 @@ public final class BlockUtil int dictionarySize = length / 5; BlockBuilder builder = REAL.createBlockBuilder(null, dictionarySize); for (int i = start; i < start + dictionarySize; i++) { - REAL.writeLong(builder, i); + REAL.writeLong(builder, floatToRawIntBits((float) i)); } int[] ids = new int[length]; for (int i = 0; i < length; i++) { @@ -434,7 +462,7 @@ public final class BlockUtil String str = 
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; StringBuilder stringBuilder = new StringBuilder(); for (int j = 0; j < width; j++) { - stringBuilder.append(str.charAt(index + offset + j) % str.length()); + stringBuilder.append(str.charAt((index + offset + j) % str.length())); } return stringBuilder.toString(); } diff --git a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/PageBuilderUtil.java b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/PageBuilderUtil.java index ea09c979b..384ff3d61 100644 --- a/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/PageBuilderUtil.java +++ b/omnioperator/omniop-openlookeng-extension/src/test/java/nova/hetu/olk/operator/benchmark/PageBuilderUtil.java @@ -18,6 +18,7 @@ package nova.hetu.olk.operator.benchmark; import io.prestosql.block.BlockAssertions; import io.prestosql.spi.Page; import io.prestosql.spi.block.Block; +import io.prestosql.spi.type.CharType; import io.prestosql.spi.type.DecimalType; import io.prestosql.spi.type.Type; import io.prestosql.spi.type.VarcharType; @@ -68,6 +69,10 @@ public final class PageBuilderUtil blocks[i] = BlockUtil.createStringSequenceBlock(initialValue, initialValue + length, (VarcharType) type); } + else if (type instanceof CharType) { + blocks[i] = BlockUtil.createStringSequenceBlock(initialValue, initialValue + length, + (CharType) type); + } else if (type.equals(BOOLEAN)) { blocks[i] = BlockAssertions.createBooleanSequenceBlock(initialValue, initialValue + length); } @@ -120,6 +125,10 @@ public final class PageBuilderUtil blocks[i] = BlockUtil.createStringDictionaryBlock(initialValue, initialValue + length, (VarcharType) type); } + else if (type instanceof CharType) { + blocks[i] = BlockUtil.createStringDictionaryBlock(initialValue, initialValue + length, + (CharType) type); + } else if (type.equals(BOOLEAN)) { blocks[i] = BlockUtil.createBooleanDictionaryBlock(initialValue, initialValue + length); } -- Gitee