Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
77 commits
Select commit Hold shift + click to select a range
ca33d8b
Fix build
Mar 14, 2024
1abba25
Merge branch 'master' of github.com:findingrish/druid
Jun 10, 2024
4982882
Merge remote-tracking branch 'upstream/master'
Jun 20, 2024
1940170
Merge remote-tracking branch 'upstream/master'
Jul 17, 2024
4c1711e
Merge remote-tracking branch 'upstream/master'
Jul 21, 2024
61f9b4b
Add IT to test BackwardIncompatibility
Jul 21, 2024
a58e7ed
minor change
Jul 21, 2024
c41cdeb
test
Jul 21, 2024
b08cf00
test
Jul 21, 2024
00b8f29
disable some gha checks temporarily
Jul 21, 2024
a351a48
Set env while running revised it
Jul 21, 2024
ccad637
another way to set env
Jul 21, 2024
bfad1e4
log envs in it.sh
Jul 21, 2024
b290f87
test
Jul 21, 2024
666609e
fix minor bug
Jul 21, 2024
a5e4863
pass env vars in reusable-revised-its
Jul 21, 2024
0616c79
enable maven build again
Jul 21, 2024
97486b5
remove env override from pom
Jul 22, 2024
4df3296
temporarily remove conditional
Jul 22, 2024
4d97b55
use correct jdk version in docker tag step
Jul 22, 2024
f977f24
cache the previous image as well
Jul 22, 2024
3a5c798
minor change
Jul 22, 2024
8811712
fix env var usage
Jul 22, 2024
f8aa164
Remove test changes to set env var in it.sh
Jul 22, 2024
b067b30
Debug failure in untarring the previous version
Jul 22, 2024
721532c
Cleanup test changes
Jul 22, 2024
3604451
Derive BACKWARD_INCOMPATIBILITY_IT_ENABLED and DRUID_PREVIOUS_IT_IMAG…
Jul 26, 2024
3ac2385
Rename test group to BackwardCompatibility
Jul 26, 2024
7c34af3
rename config var in gha
Jul 26, 2024
5054ba5
Update job to set env var
Jul 26, 2024
7aaa9fd
temp step to debug env vars are set correctly
Jul 26, 2024
bf93720
minor change
Jul 26, 2024
3c493dc
CircularList round-robin iterator for the KillUnusedSegments duty (#1…
abhishekrb19 Jul 26, 2024
ed48cb8
[Docs] Remove avro_ocf support from Kafka & Kinesis streaming sources…
techdocsmith Jul 26, 2024
028ee23
[Docs] batch 03 - trig functions (#16795)
edgar2020 Jul 26, 2024
c07aeed
[docs] Updating Rollup tutorial (#16762)
edgar2020 Jul 26, 2024
caedeb6
Add API to update compaction engine (#16803)
kfaraz Jul 27, 2024
6fad8c3
Cleanup
Jul 29, 2024
cbca0dc
Bump jclouds.version from 2.5.0 to 2.6.0 (#16796)
dependabot[bot] Jul 29, 2024
f5527dc
Bump io.grpc:grpc-netty-shaded from 1.57.2 to 1.65.1 (#16731)
dependabot[bot] Jul 29, 2024
c7cde31
HAVING clauses may not contain window functions (#16742)
kgyrtkirk Jul 29, 2024
6ff0460
Rename test group, add docs
Jul 29, 2024
c5f89e0
Add existing tests to BackwardCompatibilityMain group
Jul 29, 2024
6c896ec
Fix tests
Jul 29, 2024
e9ea243
Enable compaction ITs on MSQ engine (#16778)
gargvishesh Jul 30, 2024
74631ef
add license to new files
Jul 30, 2024
92a40d8
Add API to fetch conflicting task locks (#16799)
AmatyaAvadhanula Jul 30, 2024
954aaaf
Refactor: Clean up compaction config classes (#16810)
kfaraz Jul 30, 2024
6f325a0
Fix checkstyle, update docs
Jul 30, 2024
85a8a1d
[Docs]Batch04 - Bitwise numeric functions (#16805)
edgar2020 Jul 30, 2024
3bb6d40
[docs] batch 5 updating functions (#16812)
edgar2020 Jul 31, 2024
01f6cfc
MSQ worker: Support in-memory shuffles. (#16790)
gianm Jul 31, 2024
bb4d6cc
Add task report fields in response of SQL statements endpoint (#16808)
Akshat-Jain Aug 1, 2024
8c170f7
Web console: use stages, counters, and warnings from the new detailed…
vogievetsky Aug 1, 2024
63ba5a4
Fix issues with fetching task reports in SQL statements endpoint for …
Akshat-Jain Aug 2, 2024
9b731e8
Kinesis Input Format for timestamp, and payload parsing (#16813)
zachjsh Aug 2, 2024
fe6772a
Rename test builder `MSQTester.setExpectedSegment` (#16837)
abhishekrb19 Aug 2, 2024
9dc2569
Track and emit segment loading rate for HttpLoadQueuePeon on Coordina…
kfaraz Aug 3, 2024
8562f0e
Merge remote-tracking branch 'upstream/master' into backward-incompat…
Aug 3, 2024
e0159a2
fix checkstyle
Aug 3, 2024
31b4375
Add `druid.indexing.formats.stringMultiValueHandlingMode` system conf…
abhishekrb19 Aug 3, 2024
c89ef54
Minor change
Aug 3, 2024
38565a8
minor change
Aug 4, 2024
3dc0be1
Merge remote-tracking branch 'upstream/master' into backward-incompat…
Aug 5, 2024
c7eacd0
fallback SQL IN filter to expression filter when VirtualColumnRegistr…
sreemanamala Aug 5, 2024
0411c4e
Add metrics for number of rows/bytes materialized while running subqu…
LakshSingla Aug 5, 2024
c84e689
Don't use ComplexMetricExtractor to fetch the class of the object in …
LakshSingla Aug 5, 2024
c8323d1
Add indexer task success and failure metrics (#16829)
rbankar7 Aug 5, 2024
08f9ec1
Memoize the redundant calls to overlord in sql statements endpoint (#…
Akshat-Jain Aug 5, 2024
26e3c44
Quidem record (#16624)
kgyrtkirk Aug 5, 2024
461727d
Fix Druid Console cannot open submit supervisor dialog (#16736)
AlbericByte Aug 5, 2024
aeace28
Web console: Add columnMapping information to the Explain dialog (#16…
vogievetsky Aug 5, 2024
82c4a24
Try setting aws configs for backward compatibility test
Aug 6, 2024
8e4af05
Merge remote-tracking branch 'upstream/master' into backward-incompat…
Aug 6, 2024
cc31175
Temporarily disable ITBCMainSystemTableBatchIndexTaskTest
Aug 6, 2024
b934f46
Refactor
Aug 6, 2024
a071285
Remove configs for enabling the backward compatibility IT
Aug 6, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
49 changes: 48 additions & 1 deletion .github/workflows/reusable-revised-its.yml
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,19 @@ on:
AWS_SECRET_ACCESS_KEY:
required: false
type: string
BACKWARD_COMPATIBILITY_IT_ENABLED:
required: false
type: string
default: false
DRUID_PREVIOUS_VERSION:
required: false
type: string
DRUID_PREVIOUS_VERSION_DOWNLOAD_URL:
required: false
type: string
DRUID_PREVIOUS_IT_IMAGE_NAME:
required: false
type: string

env:
MYSQL_DRIVER_CLASSNAME: ${{ inputs.mysql_driver }} # Used by tests to connect to metadata store directly.
Expand Down Expand Up @@ -106,6 +119,15 @@ jobs:
./druid-container-jdk${{ inputs.build_jdk }}.tar.gz
./integration-tests-ex/image/target/env.sh

- name: Retrieve previous version cached docker image
id: docker-restore-previous-version
if: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED == 'true' }}
uses: actions/cache/restore@v4
with:
key: druid-container-jdk${{ inputs.build_jdk }}-version${{ inputs.DRUID_PREVIOUS_VERSION }}.tar.gz-${{ github.sha }}
path: |
./druid-container-jdk${{ inputs.build_jdk }}-version${{ inputs.DRUID_PREVIOUS_VERSION }}.tar.gz

- name: Maven build
if: steps.maven-restore.outputs.cache-hit != 'true' || ( steps.docker-restore.outputs.cache-hit != 'true' && steps.targets-restore.outputs.cache-hit != 'true' )
run: |
Expand All @@ -115,13 +137,26 @@ jobs:
if: steps.docker-restore.outputs.cache-hit != 'true' || steps.maven-restore.outputs.cache-hit != 'true'
env:
docker-restore: ${{ toJson(steps.docker-restore.outputs) }}
BACKWARD_COMPATIBILITY_IT_ENABLED: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED }}
DRUID_PREVIOUS_VERSION: ${{ inputs.DRUID_PREVIOUS_VERSION }}
DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ inputs.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ inputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
run: |
./it.sh image
source ./integration-tests-ex/image/target/env.sh
docker tag $DRUID_IT_IMAGE_NAME $DRUID_IT_IMAGE_NAME-jdk${{ inputs.build_jdk }}
echo $DRUID_IT_IMAGE_NAME
docker save "$DRUID_IT_IMAGE_NAME" | gzip > druid-container-jdk${{ inputs.build_jdk }}.tar.gz

- name: Save previous version docker image
if: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED == 'true' && (steps.docker-restore.outputs.cache-hit != 'true' || steps.maven-restore.outputs.cache-hit != 'true') }}
env:
docker-restore: ${{ toJson(steps.docker-restore.outputs) }}
run: |
docker tag ${{ inputs.DRUID_PREVIOUS_IT_IMAGE_NAME }} ${{ inputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}-jdk${{ inputs.build_jdk }}-version${{ inputs.DRUID_PREVIOUS_VERSION }}
echo ${DRUID_PREVIOUS_IT_IMAGE_NAME}
docker save "${{ inputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}" | gzip > druid-container-jdk${{ inputs.build_jdk }}-version${{ inputs.DRUID_PREVIOUS_VERSION }}.tar.gz

- name: Stop and remove docker containers
run: |
echo "Force stopping all containers and pruning"
Expand All @@ -133,9 +168,21 @@ jobs:
docker load --input druid-container-jdk${{ inputs.build_jdk }}.tar.gz
docker images

- name: Load previous version docker image
if: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED == 'true' }}
run: |
docker load --input druid-container-jdk${{ inputs.build_jdk }}-version${{ inputs.DRUID_PREVIOUS_VERSION }}.tar.gz
docker images

- name: Run IT
id: run-it
run: ${{ inputs.script }}
env:
BACKWARD_COMPATIBILITY_IT_ENABLED: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED }}
DRUID_PREVIOUS_VERSION: ${{ inputs.DRUID_PREVIOUS_VERSION }}
DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ inputs.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ inputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
run: |
${{ inputs.script }}

- name: Collect docker logs on failure
if: ${{ failure() && steps.run-it.conclusion == 'failure' }}
Expand Down
39 changes: 39 additions & 0 deletions .github/workflows/revised-its.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,24 @@
name: "Revised ITs workflow"
on:
workflow_call:
inputs:
BACKWARD_COMPATIBILITY_IT_ENABLED:
description: "Flag for backward compatibility IT"
required: false
default: false
type: string
DRUID_PREVIOUS_VERSION:
description: "Previous druid version to run the test against."
required: false
type: string
DRUID_PREVIOUS_VERSION_DOWNLOAD_URL:
description: "URL to download the previous druid version."
required: false
type: string
DRUID_PREVIOUS_IT_IMAGE_NAME:
description: "Druid previous version image name."
required: false
type: string
workflow_dispatch:

jobs:
Expand Down Expand Up @@ -79,3 +97,24 @@ jobs:
AWS_REGION: us-east-1
AWS_ACCESS_KEY_ID: admin
AWS_SECRET_ACCESS_KEY: miniopassword

backward-compatibility-it:
needs: changes
uses: ./.github/workflows/reusable-revised-its.yml
if: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED == 'true' && (needs.changes.outputs.core == 'true' || needs.changes.outputs.common-extensions == 'true') }}
with:
build_jdk: 8
runtime_jdk: 8
use_indexer: middleManager
script: ./it.sh github BackwardCompatibilityMain
it: BackwardCompatibilityMain
mysql_driver: com.mysql.jdbc.Driver
BACKWARD_COMPATIBILITY_IT_ENABLED: ${{ inputs.BACKWARD_COMPATIBILITY_IT_ENABLED }}
DRUID_PREVIOUS_VERSION: ${{ inputs.DRUID_PREVIOUS_VERSION }}
DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ inputs.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ inputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
DRUID_CLOUD_BUCKET: druid-qa
DRUID_CLOUD_PATH: aws-${{ github.run_id }}-${{ github.run_attempt }}
AWS_REGION: us-east-1
AWS_ACCESS_KEY_ID: admin
AWS_SECRET_ACCESS_KEY: miniopassword
50 changes: 49 additions & 1 deletion .github/workflows/unit-and-integration-tests-unified.yml
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,30 @@ env:
SEGMENT_DOWNLOAD_TIMEOUT_MINS: 5

jobs:
set-env-var:
name: Set env var
runs-on: ubuntu-latest
outputs:
DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ steps.image_name.outputs.image_name }}
BACKWARD_COMPATIBILITY_IT_ENABLED: ${{ steps.it_enabled.outputs.enabled }}
DRUID_PREVIOUS_VERSION: ${{ env.DRUID_PREVIOUS_VERSION }}
DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ env.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
steps:
- name: Set image name env var
id: image_name
run: |
echo "::set-output name=image_name::org.apache.druid.integration-tests/test:${{ env.DRUID_PREVIOUS_VERSION }}"
- name: Set env for enabling backward compatibility it
id: it_enabled
run: |
if [ -n "${{ env.DRUID_PREVIOUS_VERSION }}" ]; then
echo "::set-output name=enabled::true"
else
echo "::set-output name=enabled::false"
fi

build:
needs: set-env-var
name: "build (jdk${{ matrix.jdk }})"
strategy:
fail-fast: false
Expand Down Expand Up @@ -94,12 +117,25 @@ jobs:
./druid-container-jdk${{ matrix.jdk }}.tar.gz
./integration-tests-ex/image/target/env.sh

- name: Cache previous version image
id: docker_container_previous_version
uses: actions/cache@v4
with:
key: druid-container-jdk${{ matrix.jdk }}-version${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION }}.tar.gz-${{ github.sha }}
path: |
./druid-container-jdk${{ matrix.jdk }}-version${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION }}.tar.gz

- name: Maven build
id: maven_build
run: |
./it.sh ci

- name: Container build
env:
BACKWARD_COMPATIBILITY_IT_ENABLED: ${{ needs.set-env-var.outputs.BACKWARD_COMPATIBILITY_IT_ENABLED }}
DRUID_PREVIOUS_VERSION: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION }}
DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
run: |
./it.sh image
source ./integration-tests-ex/image/target/env.sh
Expand All @@ -111,6 +147,13 @@ jobs:
echo $DRUID_IT_IMAGE_NAME
docker save "$DRUID_IT_IMAGE_NAME" | gzip > druid-container-jdk${{ matrix.jdk }}.tar.gz

- name: Save previous version docker image
if: ${{ needs.set-env-var.outputs.BACKWARD_COMPATIBILITY_IT_ENABLED == 'true' }}
run: |
docker tag ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }} ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}-jdk${{ matrix.jdk }}-version${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION }}
echo ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
docker save "${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}" | gzip > druid-container-jdk${{ matrix.jdk }}-version${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION }}.tar.gz

unit-tests-phase2:
strategy:
fail-fast: false
Expand Down Expand Up @@ -142,6 +185,11 @@ jobs:
uses: ./.github/workflows/standard-its.yml

revised-its:
needs: unit-tests
needs: [unit-tests, set-env-var]
if: ${{ always() && (needs.unit-tests.result == 'success' || needs.unit-tests.outputs.continue_tests) }}
uses: ./.github/workflows/revised-its.yml
with:
BACKWARD_COMPATIBILITY_IT_ENABLED: ${{ needs.set-env-var.outputs.BACKWARD_COMPATIBILITY_IT_ENABLED }}
DRUID_PREVIOUS_VERSION: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION }}
DRUID_PREVIOUS_VERSION_DOWNLOAD_URL: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_VERSION_DOWNLOAD_URL }}
DRUID_PREVIOUS_IT_IMAGE_NAME: ${{ needs.set-env-var.outputs.DRUID_PREVIOUS_IT_IMAGE_NAME }}
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ README
.pmdruleset.xml
.java-version
integration-tests/gen-scripts/
/bin/
**/bin/
*.hprof
**/.ipynb_checkpoints/
website/.yarn/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.guice.NestedDataModule;
import org.apache.druid.guice.BuiltInTypesModule;
import org.apache.druid.jackson.AggregatorsModule;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Pair;
Expand Down Expand Up @@ -68,7 +68,7 @@ public class GroupByDeserializationBenchmark

static {
NullHandling.initializeForTests();
NestedDataModule.registerHandlersAndSerde();
BuiltInTypesModule.registerHandlersAndSerde();
AggregatorsModule.registerComplexMetricsAndSerde();
}

Expand All @@ -93,7 +93,7 @@ public class GroupByDeserializationBenchmark
public void setup() throws JsonProcessingException
{
final ObjectMapper undecoratedMapper = TestHelper.makeJsonMapper();
undecoratedMapper.registerModules(NestedDataModule.getJacksonModulesList());
undecoratedMapper.registerModules(BuiltInTypesModule.getJacksonModulesList());
undecoratedMapper.registerModule(new AggregatorsModule());
final Pair<GroupByQuery, String> sqlQueryAndResultRow = sqlQueryAndResultRow(
numDimensions,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@
import org.apache.druid.frame.read.FrameReader;
import org.apache.druid.frame.testutil.FrameSequenceBuilder;
import org.apache.druid.frame.write.FrameWriters;
import org.apache.druid.guice.NestedDataModule;
import org.apache.druid.guice.BuiltInTypesModule;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.NonnullPair;
Expand Down Expand Up @@ -85,7 +85,7 @@ public class FrameChannelMergerBenchmark
{
static {
NullHandling.initializeForTests();
NestedDataModule.registerHandlersAndSerde();
BuiltInTypesModule.registerHandlersAndSerde();
}

private static final String KEY = "key";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@
import org.apache.druid.sql.calcite.run.SqlEngine;
import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.hook.DruidHookDispatcher;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.openjdk.jmh.annotations.Benchmark;
Expand Down Expand Up @@ -202,7 +203,8 @@ public void setup() throws JsonProcessingException
new CalciteRulesManager(ImmutableSet.of()),
CalciteTests.createJoinableFactoryWrapper(),
CatalogResolver.NULL_RESOLVER,
new AuthConfig()
new AuthConfig(),
new DruidHookDispatcher()
);

String prefix = ("explain plan for select long1 from foo where long1 in ");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,7 @@
import org.apache.druid.sql.calcite.util.LookylooModule;
import org.apache.druid.sql.calcite.util.QueryFrameworkUtils;
import org.apache.druid.sql.calcite.util.testoperator.CalciteTestOperatorModule;
import org.apache.druid.sql.hook.DruidHookDispatcher;
import org.apache.druid.timeline.DataSegment;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
Expand Down Expand Up @@ -674,7 +675,8 @@ public static Pair<PlannerFactory, SqlEngine> createSqlSystem(
new CalciteRulesManager(ImmutableSet.of()),
new JoinableFactoryWrapper(QueryFrameworkUtils.createDefaultJoinableFactory(injector)),
CatalogResolver.NULL_RESOLVER,
new AuthConfig()
new AuthConfig(),
new DruidHookDispatcher()
);

return Pair.of(plannerFactory, engine);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@
import org.apache.druid.sql.calcite.run.SqlEngine;
import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.hook.DruidHookDispatcher;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.openjdk.jmh.annotations.Benchmark;
Expand Down Expand Up @@ -364,7 +365,8 @@ public void setup()
new CalciteRulesManager(ImmutableSet.of()),
CalciteTests.createJoinableFactoryWrapper(),
CatalogResolver.NULL_RESOLVER,
new AuthConfig()
new AuthConfig(),
new DruidHookDispatcher()
);

try {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.data.input.impl.DimensionSchema;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.guice.NestedDataModule;
import org.apache.druid.guice.BuiltInTypesModule;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.guava.Sequence;
Expand Down Expand Up @@ -57,6 +57,7 @@
import org.apache.druid.sql.calcite.run.SqlEngine;
import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.hook.DruidHookDispatcher;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.openjdk.jmh.annotations.Benchmark;
Expand Down Expand Up @@ -89,7 +90,7 @@ public class SqlGroupByBenchmark
static {
NullHandling.initializeForTests();
ExpressionProcessing.initializeForTests();
NestedDataModule.registerHandlersAndSerde();
BuiltInTypesModule.registerHandlersAndSerde();
}

private static final Logger log = new Logger(SqlGroupByBenchmark.class);
Expand Down Expand Up @@ -331,7 +332,7 @@ public void setup()

// Hacky and pollutes global namespace, but it is fine since benchmarks are run in isolation. Wasn't able
// to work up a cleaner way of doing it by modifying the injector.
CalciteTests.getJsonMapper().registerModules(NestedDataModule.getJacksonModulesList());
CalciteTests.getJsonMapper().registerModules(BuiltInTypesModule.getJacksonModulesList());

final DruidSchemaCatalog rootSchema =
CalciteTests.createMockRootSchema(conglomerate, walker, plannerConfig, AuthTestUtils.TEST_AUTHORIZER_MAPPER);
Expand All @@ -347,7 +348,8 @@ public void setup()
new CalciteRulesManager(ImmutableSet.of()),
CalciteTests.createJoinableFactoryWrapper(),
CatalogResolver.NULL_RESOLVER,
new AuthConfig()
new AuthConfig(),
new DruidHookDispatcher()
);

try {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@
import org.apache.druid.sql.calcite.run.SqlEngine;
import org.apache.druid.sql.calcite.schema.DruidSchemaCatalog;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.hook.DruidHookDispatcher;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.LinearShardSpec;
import org.openjdk.jmh.annotations.Benchmark;
Expand Down Expand Up @@ -402,7 +403,8 @@ public void setup()
new CalciteRulesManager(ImmutableSet.of()),
CalciteTests.createJoinableFactoryWrapper(),
CatalogResolver.NULL_RESOLVER,
new AuthConfig()
new AuthConfig(),
new DruidHookDispatcher()
);

try {
Expand Down
Loading