<?xml version="1.0" encoding="UTF-8"?>
<!--
  Copyright (c) 2020-2023, NVIDIA CORPORATION.

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <!-- Root aggregator of the multi-module build (packaging "pom"): modules are
         contributed by the buildver-specific profiles below rather than listed here. -->
    <groupId>com.nvidia</groupId>
    <artifactId>rapids-4-spark-parent</artifactId>
    <name>RAPIDS Accelerator for Apache Spark Root Project</name>
    <description>The root project of the RAPIDS Accelerator for Apache Spark</description>
    <version>23.02.0</version>
    <packaging>pom</packaging>

    <url>https://nvidia.github.io/spark-rapids/</url>
    <licenses>
        <license>
            <name>Apache License, Version 2.0</name>
            <url>https://www.apache.org/licenses/LICENSE-2.0.txt</url>
            <distribution>repo</distribution>
        </license>
    </licenses>
    <!-- Source control coordinates for the published POM. -->
    <scm>
        <connection>scm:git:https://github.com/NVIDIA/spark-rapids.git</connection>
        <developerConnection>scm:git:git@github.com:NVIDIA/spark-rapids.git</developerConnection>
        <tag>HEAD</tag>
        <url>https://github.com/NVIDIA/spark-rapids</url>
    </scm>
    <!-- Committer list published with the POM (used by repository front-ends). -->
    <developers>
        <developer>
            <id>revans2</id>
            <name>Robert Evans</name>
            <email>roberte@nvidia.com</email>
            <roles>
                <role>Committer</role>
            </roles>
            <timezone>-6</timezone>
        </developer>
        <developer>
            <id>tgravescs</id>
            <name>Thomas Graves</name>
            <email>tgraves@nvidia.com</email>
            <roles>
                <role>Committer</role>
            </roles>
            <timezone>-6</timezone>
        </developer>
        <developer>
            <id>jlowe</id>
            <name>Jason Lowe</name>
            <email>jlowe@nvidia.com</email>
            <roles>
                <role>Committer</role>
            </roles>
            <timezone>-6</timezone>
        </developer>
    </developers>
    <profiles>
        <!--
            Per-Spark-version release profiles (Spark 3.1.x family). Each profile is
            activated by -Dbuildver=<ver> and pins the Spark dependency version, the
            matching parquet-hadoop version, the shim source directories, and the set
            of modules that build against that Spark version.
        -->
        <profile>
            <id>release311</id>
            <activation>
                <!-- Default build when no -Dbuildver is supplied; also matches -Dbuildver=311. -->
                <activeByDefault>true</activeByDefault>
                <property>
                    <name>buildver</name>
                    <value>311</value>
                </property>
            </activation>
            <properties>
                <buildver>311</buildver>
                <spark.version>${spark311.version}</spark.version>
                <spark.test.version>${spark311.version}</spark.test.version>
                <parquet.hadoop.version>1.10.1</parquet.hadoop.version>
                <spark.shim.sources>${spark311.sources}</spark.shim.sources>
                <spark.shim.test.sources>${spark311.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-stub</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>api_validation</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <profile>
            <!-- Note Databricks requires 2 properties -Ddatabricks and -Dbuildver=312db -->
            <id>release312db</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>312db</value>
                </property>
            </activation>
            <properties>
                <buildver>312db</buildver>
                <!-- Downgrade scala plugin version due to: https://github.com/sbt/sbt/issues/4305 -->
                <scala.plugin.version>3.4.4</scala.plugin.version>
                <spark.version.classifier>spark312db</spark.version.classifier>
                <!--
                     Note that we are using the Spark version for all of the Databricks dependencies as well.
                     The jenkins/databricks/build.sh script handles installing the jars as maven artifacts.
                     This is to make it easier and not have to change version numbers for each individual dependency
                     and deal with differences between Databricks versions
                -->
                <spark.version>${spark312db.version}</spark.version>
                <spark.test.version>${spark312db.version}</spark.test.version>
                <hadoop.client.version>2.7.4</hadoop.client.version>
                <rat.consoleOutput>true</rat.consoleOutput>
                <parquet.hadoop.version>1.10.1</parquet.hadoop.version>
                <spark.shim.sources>${spark312db.sources}</spark.shim.sources>
                <spark.shim.test.sources>${spark312db.test.sources}</spark.shim.test.sources>
            </properties>

            <modules>
                <module>delta-lake/delta-stub</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <profile>
            <id>release312</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>312</value>
                </property>
            </activation>
            <properties>
                <buildver>312</buildver>
                <spark.version>${spark312.version}</spark.version>
                <spark.test.version>${spark312.version}</spark.test.version>
                <parquet.hadoop.version>1.10.1</parquet.hadoop.version>
                <spark.shim.sources>${spark312.sources}</spark.shim.sources>
                <spark.shim.test.sources>${spark312.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-stub</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
                <module>api_validation</module>
            </modules>
        </profile>
        <profile>
            <id>release313</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>313</value>
                </property>
            </activation>
            <properties>
                <buildver>313</buildver>
                <spark.version>${spark313.version}</spark.version>
                <spark.test.version>${spark313.version}</spark.test.version>
                <parquet.hadoop.version>1.10.1</parquet.hadoop.version>
                <spark.shim.sources>${spark313.sources}</spark.shim.sources>
                <spark.shim.test.sources>${spark313.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-stub</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
                <module>api_validation</module>
            </modules>
        </profile>
        <profile>
            <id>release314</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>314</value>
                </property>
            </activation>
            <properties>
                <buildver>314</buildver>
                <spark.version>${spark314.version}</spark.version>
                <spark.test.version>${spark314.version}</spark.test.version>
                <parquet.hadoop.version>1.10.1</parquet.hadoop.version>
                <spark.shim.sources>${spark314.sources}</spark.shim.sources>
                <spark.shim.test.sources>${spark314.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-stub</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
                <module>api_validation</module>
            </modules>
        </profile>
        <!-- Spark 3.2.x release profiles: these switch the delta-lake module to delta-20x. -->
        <profile>
            <id>release320</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>320</value>
                </property>
            </activation>
            <properties>
                <buildver>320</buildver>
                <spark.version>${spark320.version}</spark.version>
                <spark.test.version>${spark320.version}</spark.test.version>
                <parquet.hadoop.version>1.12.1</parquet.hadoop.version>
                <spark.shim.sources>${spark320.sources}</spark.shim.sources>
                <spark.shim.test.sources>${spark320.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-20x</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <profile>
            <id>release321</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>321</value>
                </property>
            </activation>
            <properties>
                <buildver>321</buildver>
                <spark.version>${spark321.version}</spark.version>
                <spark.test.version>${spark321.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <spark.shim.sources>${spark321.sources}</spark.shim.sources>
                <spark.shim.test.sources>${spark321.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-20x</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <profile>
            <id>release321cdh</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>321cdh</value>
                </property>
            </activation>
            <properties>
                <buildver>321cdh</buildver>
                <spark.version>${spark321cdh.version}</spark.version>
                <spark.test.version>${spark321cdh.version}</spark.test.version>
                <parquet.hadoop.version>1.10.1</parquet.hadoop.version>
                <spark.shim.sources>${spark321cdh.sources}</spark.shim.sources>
                <spark.shim.test.sources>${spark321cdh.test.sources}</spark.shim.test.sources>
            </properties>
            <!-- Cloudera-hosted artifacts for the CDH-specific Spark build.
                 NOTE(review): unlike release330cdh, this repo declaration does not
                 explicitly enable snapshots; confirm whether that is intentional. -->
            <repositories>
                <repository>
                    <id>cloudera-repo</id>
                    <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
                </repository>
            </repositories>
            <modules>
                <module>delta-lake/delta-20x</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <profile>
            <id>release322</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>322</value>
                </property>
            </activation>
            <properties>
                <buildver>322</buildver>
                <spark.version>${spark322.version}</spark.version>
                <spark.test.version>${spark322.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <spark.shim.sources>${spark322.sources}</spark.shim.sources>
                <spark.shim.test.sources>${spark322.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-20x</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <profile>
            <id>release323</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>323</value>
                </property>
            </activation>
            <properties>
                <buildver>323</buildver>
                <spark.version>${spark323.version}</spark.version>
                <spark.test.version>${spark323.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <spark.shim.sources>${spark323.sources}</spark.shim.sources>
                <spark.shim.test.sources>${spark323.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-20x</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <profile>
            <!-- Note Databricks requires 2 properties -Ddatabricks and -Dbuildver=321db -->
            <id>release321db</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>321db</value>
                </property>
            </activation>
            <properties>
                <buildver>321db</buildver>
                <!-- Downgrade scala plugin version due to: https://github.com/sbt/sbt/issues/4305 -->
                <scala.plugin.version>3.4.4</scala.plugin.version>
                <spark.version.classifier>spark321db</spark.version.classifier>
                <!--
                     Note that we are using the Spark version for all of the Databricks dependencies as well.
                     The jenkins/databricks/build.sh script handles installing the jars as maven artifacts.
                     This is to make it easier and not have to change version numbers for each individual dependency
                     and deal with differences between Databricks versions
                -->
                <spark.version>${spark321db.version}</spark.version>
                <spark.test.version>${spark321db.version}</spark.test.version>
                <hadoop.client.version>3.3.1</hadoop.client.version>
                <rat.consoleOutput>true</rat.consoleOutput>
                <!-- NOTE(review): 1.12.0 here vs 1.12.2 in the Apache 3.2.x profiles;
                     presumably pinned to the Databricks runtime's parquet; confirm. -->
                <parquet.hadoop.version>1.12.0</parquet.hadoop.version>
                <spark.shim.sources>${spark321db.sources}</spark.shim.sources>
                <spark.shim.test.sources>${spark321db.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-spark321db</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <!-- Spark 3.3.x release profiles: build both delta-21x and delta-22x support,
             and all share the spark330 iceberg version. -->
        <profile>
            <id>release330</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>330</value>
                </property>
            </activation>
            <properties>
                <buildver>330</buildver>
                <spark.version>${spark330.version}</spark.version>
                <spark.test.version>${spark330.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <spark.shim.sources>${spark330.sources}</spark.shim.sources>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <spark.shim.test.sources>${spark330.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-21x</module>
                <module>delta-lake/delta-22x</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <profile>
            <id>release331</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>331</value>
                </property>
            </activation>
            <properties>
                <buildver>331</buildver>
                <spark.version>${spark331.version}</spark.version>
                <spark.test.version>${spark331.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <spark.shim.sources>${spark331.sources}</spark.shim.sources>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <spark.shim.test.sources>${spark331.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-21x</module>
                <module>delta-lake/delta-22x</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <profile>
            <id>release332</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>332</value>
                </property>
            </activation>
            <properties>
                <buildver>332</buildver>
                <spark.version>${spark332.version}</spark.version>
                <spark.test.version>${spark332.version}</spark.test.version>
                <parquet.hadoop.version>1.12.2</parquet.hadoop.version>
                <spark.shim.sources>${spark332.sources}</spark.shim.sources>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <spark.shim.test.sources>${spark332.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-21x</module>
                <module>delta-lake/delta-22x</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <profile>
            <id>release340</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>340</value>
                </property>
            </activation>
            <properties>
                <buildver>340</buildver>
                <spark.version>${spark340.version}</spark.version>
                <spark.test.version>${spark340.version}</spark.test.version>
                <parquet.hadoop.version>1.12.3</parquet.hadoop.version>
                <spark.shim.sources>${spark340.sources}</spark.shim.sources>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <spark.shim.test.sources>${spark340.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-stub</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <profile>
            <id>release330cdh</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>330cdh</value>
                </property>
            </activation>
            <properties>
                <buildver>330cdh</buildver>
                <spark.version>${spark330cdh.version}</spark.version>
                <spark.test.version>${spark330cdh.version}</spark.test.version>
                <parquet.hadoop.version>1.10.99.7.1.8.0-801</parquet.hadoop.version>
                <spark.shim.sources>${spark330cdh.sources}</spark.shim.sources>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <spark.shim.test.sources>${spark330cdh.test.sources}</spark.shim.test.sources>
            </properties>
            <!-- Cloudera-hosted artifacts for the CDH-specific Spark build. -->
            <repositories>
                <repository>
                    <id>cloudera-repo</id>
                    <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
                    <releases>
                        <enabled>true</enabled>
                    </releases>
                    <snapshots>
                        <enabled>true</enabled>
                    </snapshots>
                </repository>
            </repositories>
            <modules>
                <module>delta-lake/delta-21x</module>
                <module>delta-lake/delta-22x</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <profile>
            <!-- Note Databricks requires 2 properties -Ddatabricks and -Dbuildver=330db -->
            <!-- Note that 330db backports many features from Spark3.4.0 -->
            <id>release330db</id>
            <activation>
                <property>
                    <name>buildver</name>
                    <value>330db</value>
                </property>
            </activation>
            <properties>
                <!-- Set buildver here for consistency with every other release profile
                     (release312db and release321db both set it), so that activating this
                     profile by id alone (-Prelease330db) still produces a 330db build
                     instead of silently falling back to the default buildver. -->
                <buildver>330db</buildver>
                <!-- Downgrade scala plugin version due to: https://github.com/sbt/sbt/issues/4305 -->
                <scala.plugin.version>3.4.4</scala.plugin.version>
                <spark.version.classifier>spark330db</spark.version.classifier>
                <!--
                     Note that we are using the Spark version for all of the Databricks dependencies as well.
                     The jenkins/databricks/build.sh script handles installing the jars as maven artifacts.
                     This is to make it easier and not have to change version numbers for each individual dependency
                     and deal with differences between Databricks versions
                -->
                <spark.version>${spark330db.version}</spark.version>
                <spark.test.version>${spark330db.version}</spark.test.version>
                <hadoop.client.version>3.3.1</hadoop.client.version>
                <rat.consoleOutput>true</rat.consoleOutput>
                <parquet.hadoop.version>1.12.0</parquet.hadoop.version>
                <spark.shim.sources>${spark330db.sources}</spark.shim.sources>
                <iceberg.version>${spark330.iceberg.version}</iceberg.version>
                <spark.shim.test.sources>${spark330db.test.sources}</spark.shim.test.sources>
            </properties>
            <modules>
                <module>delta-lake/delta-spark330db</module>
                <module>dist</module>
                <module>integration_tests</module>
                <module>shuffle-plugin</module>
                <module>sql-plugin</module>
                <module>tests</module>
                <module>udf-compiler</module>
                <module>aggregator</module>
            </modules>
        </profile>
        <!-- Builds only the udf-compiler module. -->
        <profile>
            <id>udf-compiler</id>
            <modules>
                <module>udf-compiler</module>
            </modules>
        </profile>
        <!-- Attaches -sources and -javadoc jars to the build (used for releases). -->
        <profile>
            <id>source-javadoc</id>
            <build>
                <plugins>
                    <plugin>
                        <groupId>org.apache.maven.plugins</groupId>
                        <artifactId>maven-source-plugin</artifactId>
                        <version>3.0.0</version>
                        <executions>
                            <execution>
                                <id>attach-source</id>
                                <goals>
                                    <goal>jar-no-fork</goal>
                                </goals>
                            </execution>
                        </executions>
                    </plugin>
                    <plugin>
                        <groupId>org.apache.maven.plugins</groupId>
                        <artifactId>maven-javadoc-plugin</artifactId>
                        <version>3.0.0</version>
                        <executions>
                            <execution>
                                <id>attach-javadoc</id>
                                <goals>
                                    <goal>jar</goal>
                                </goals>
                            </execution>
                        </executions>
                        <configuration>
                            <!-- Disable doclint so strict javadoc checks do not fail the release build. -->
                            <doclint>none</doclint>
                        </configuration>
                    </plugin>
                </plugins>
            </build>
        </profile>
    </profiles>

    <properties>
        <allowConventionalDistJar>false</allowConventionalDistJar>
        <buildver>311</buildver>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
        <java.major.version>8</java.major.version>
        <spark.version>${spark311.version}</spark.version>
        <spark.test.version>${spark.version}</spark.test.version>
        <parquet.hadoop.version>1.10.1</parquet.hadoop.version>
        <spark.version.classifier>spark${buildver}</spark.version.classifier>
        <cuda.version>cuda11</cuda.version>
        <spark-rapids-jni.version>23.02.0</spark-rapids-jni.version>
        <scala.binary.version>2.12</scala.binary.version>
        <alluxio.client.version>2.8.0</alluxio.client.version>
        <scala.recompileMode>incremental</scala.recompileMode>
        <scala.version>2.12.15</scala.version>
        <scala.javac.args>-Xlint:all,-serial,-path,-try</scala.javac.args>
        <ucx.version>1.12.1</ucx.version>
        <!--
             If the shade package changes we need to also update jenkins/spark-premerge-build.sh
             so code coverage does not include the shaded classes.
        -->
        <rapids.shade.package>${spark.version.classifier}.com.nvidia.shaded.spark</rapids.shade.package>
        <!--
            cuda-toolkit 11.5+ will install nvidia GDS package by default
            if no compatible GDS device on test machine, the scala test would fail
            so we exclude GdsTest tag as default
        -->
        <test.exclude.tags>GdsTest</test.exclude.tags>
        <test.include.tags/>
        <rapids.shuffle.manager.override>true</rapids.shuffle.manager.override>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.sourceEncoding>UTF-8</project.reporting.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <pytest.TEST_TAGS>not qarun</pytest.TEST_TAGS>
        <pytest.TEST_PARALLEL/>
        <pytest.TEST_TYPE>developer</pytest.TEST_TYPE>
        <rat.consoleOutput>false</rat.consoleOutput>
        <!--
         If you update a dependency version so it is no longer a SNAPSHOT
         please update the snapshot-shims profile as well so it is accurate -->
        <spark311.version>3.1.1</spark311.version>
        <spark312.version>3.1.2</spark312.version>
        <spark312db.version>3.1.2-databricks</spark312db.version>
        <spark313.version>3.1.3</spark313.version>
        <spark314.version>3.1.4-SNAPSHOT</spark314.version>
        <spark320.version>3.2.0</spark320.version>
        <spark321.version>3.2.1</spark321.version>
        <spark321cdh.version>3.2.1.3.2.7171000.0-3</spark321cdh.version>
        <spark321db.version>3.2.1-databricks</spark321db.version>
        <spark322.version>3.2.2</spark322.version>
        <spark323.version>3.2.3</spark323.version>
        <spark330.version>3.3.0</spark330.version>
        <spark331.version>3.3.1</spark331.version>
        <spark332.version>3.3.2-SNAPSHOT</spark332.version>
        <spark340.version>3.4.0-SNAPSHOT</spark340.version>
        <spark330cdh.version>3.3.0.3.3.7180.0-274</spark330cdh.version>
        <spark330db.version>3.3.0-databricks</spark330db.version>
        <mockito.version>3.6.0</mockito.version>
        <scala.plugin.version>4.3.0</scala.plugin.version>
        <maven.jar.plugin.version>3.2.0</maven.jar.plugin.version>
        <scalatest-maven-plugin.version>2.0.2</scalatest-maven-plugin.version>
        <guava.cdh.version>30.0-jre</guava.cdh.version>
        <arrow.cdh.version>2.0.0</arrow.cdh.version>
        <slf4j.version>1.7.30</slf4j.version>
        <flatbuffers.java.version>1.11.0</flatbuffers.java.version>
        <hadoop.client.version>3.3.1</hadoop.client.version>
        <iceberg.version>0.13.2</iceberg.version>
        <scala.local-lib.path>org/scala-lang/scala-library/${scala.version}/scala-library-${scala.version}.jar</scala.local-lib.path>
        <target.classifier>${spark.version.classifier}</target.classifier>
        <maven.clean.plugin.version>3.1.0</maven.clean.plugin.version>
        <maven.scalastyle.skip>false</maven.scalastyle.skip>
        <dist.jar.compress>true</dist.jar.compress>
        <spark330.iceberg.version>0.14.1</spark330.iceberg.version>
        <!--
            If true, disables verification that all shims are built from one and the same git
            commit hash. Do not use for CI!

            It is intended only for local builds of the dist module when combining locally-built shims
            with the ones deployed to a remote Maven repo
        -->
        <ignore.shim.revisions.check>false</ignore.shim.revisions.check>
        <noSnapshot.buildvers>
            311,
            312,
            313,
            320,
            321,
            321cdh,
            322,
            323,
            330,
            331,
            330cdh
        </noSnapshot.buildvers>
        <snapshot.buildvers>
            332
        </snapshot.buildvers>
        <databricks.buildvers>
            312db,
            321db,
            330db
        </databricks.buildvers>
        <!--
          Build and run unit tests on one specific version for each sub-version (e.g. 311, 320, 330)
          Base shim version (311 currently) should be covered in the default mvn verify command of the premerge script,
          so the base shim version is removed from the premergeUT list.
          The versions are separated into two parts (premergeUT1, premergeUT2) to balance the duration
        -->
        <premergeUT1.buildvers>
            320
        </premergeUT1.buildvers>
        <premergeUT2.buildvers>
            330
        </premergeUT2.buildvers>
        <premergeUTF8.buildvers>
            320
        </premergeUTF8.buildvers>
        <jdk11.buildvers>
            312,
            321,
            331
        </jdk11.buildvers>
        <all.buildvers>
            ${noSnapshot.buildvers},
            ${snapshot.buildvers},
            ${databricks.buildvers},
            340
        </all.buildvers>
    </properties>

    <dependencyManagement>
        <dependencies>
            <!-- Native JNI bindings (spark-rapids-jni); the artifact is selected by CUDA version classifier -->
            <dependency>
              <groupId>com.nvidia</groupId>
              <artifactId>spark-rapids-jni</artifactId>
              <version>${spark-rapids-jni.version}</version>
              <classifier>${cuda.version}</classifier>
            </dependency>
            <!-- UCX Java bindings for shuffle acceleration -->
            <dependency>
                <groupId>org.openucx</groupId>
                <artifactId>jucx</artifactId>
                <version>${ucx.version}</version>
            </dependency>
            <!-- SLF4J logging bridges -->
            <dependency>
              <groupId>org.slf4j</groupId>
              <artifactId>jul-to-slf4j</artifactId>
              <version>${slf4j.version}</version>
              <!-- runtime scope is appropriate, but causes SBT build problems -->
            </dependency>
            <dependency>
              <groupId>org.slf4j</groupId>
              <artifactId>jcl-over-slf4j</artifactId>
              <version>${slf4j.version}</version>
              <!-- runtime scope is appropriate, but causes SBT build problems -->
            </dependency>
            <dependency>
              <groupId>org.scala-lang</groupId>
              <artifactId>scala-library</artifactId>
              <version>${scala.version}</version>
              <scope>provided</scope>
            </dependency>
            <!-- Apache Iceberg modules, provided scope (supplied by the cluster/runtime) -->
            <dependency>
                <groupId>org.apache.iceberg</groupId>
                <artifactId>iceberg-api</artifactId>
                <version>${iceberg.version}</version>
                <scope>provided</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.iceberg</groupId>
                <artifactId>iceberg-bundled-guava</artifactId>
                <version>${iceberg.version}</version>
                <scope>provided</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.iceberg</groupId>
                <artifactId>iceberg-core</artifactId>
                <version>${iceberg.version}</version>
                <scope>provided</scope>
            </dependency>
            <!-- Apache Spark modules at the shim's ${spark.version}, provided scope -->
            <dependency>
              <groupId>org.apache.spark</groupId>
              <artifactId>spark-annotation_${scala.binary.version}</artifactId>
              <version>${spark.version}</version>
              <scope>provided</scope>
            </dependency>
            <dependency>
              <groupId>org.apache.spark</groupId>
              <artifactId>spark-hive_${scala.binary.version}</artifactId>
              <version>${spark.version}</version>
              <scope>provided</scope>
            </dependency>
            <dependency>
              <groupId>org.apache.spark</groupId>
              <artifactId>spark-sql_${scala.binary.version}</artifactId>
              <version>${spark.version}</version>
              <scope>provided</scope>
            </dependency>
            <dependency>
              <groupId>org.apache.spark</groupId>
              <artifactId>spark-avro_${scala.binary.version}</artifactId>
              <version>${spark.version}</version>
              <scope>provided</scope>
            </dependency>
            <dependency>
                <groupId>com.google.flatbuffers</groupId>
                <artifactId>flatbuffers-java</artifactId>
                <version>${flatbuffers.java.version}</version>
                <scope>compile</scope>  <!-- for shade -->
            </dependency>
            <dependency>
              <groupId>org.rogach</groupId>
              <artifactId>scallop_${scala.binary.version}</artifactId>
              <version>3.5.1</version>
            </dependency>
            <dependency>
                <!-- Used for Alluxio mounting -->
                <groupId>org.alluxio</groupId>
                <artifactId>alluxio-shaded-client</artifactId>
                <version>${alluxio.client.version}</version>
                <scope>provided</scope>
            </dependency>
            <!-- test-only dependencies -->
            <dependency>
              <groupId>org.scalatest</groupId>
              <artifactId>scalatest_${scala.binary.version}</artifactId>
              <version>3.0.5</version>
              <scope>test</scope>
            </dependency>
            <dependency>
              <groupId>org.junit.jupiter</groupId>
              <artifactId>junit-jupiter-api</artifactId>
              <version>5.4.2</version>
              <scope>test</scope>
            </dependency>
            <dependency>
              <groupId>org.mockito</groupId>
              <artifactId>mockito-core</artifactId>
              <version>${mockito.version}</version>
              <scope>test</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>

    <build>
        <directory>${project.basedir}/target/${target.classifier}</directory>
        <pluginManagement>
            <plugins>
                <plugin>
                    <!-- version pin only; goals are configured where the plugin is used -->
                    <groupId>org.codehaus.mojo</groupId>
                    <artifactId>build-helper-maven-plugin</artifactId>
                    <version>3.3.0</version>
                </plugin>
                <plugin>
                  <groupId>org.apache.maven.plugins</groupId>
                  <artifactId>maven-antrun-plugin</artifactId>
                  <version>3.1.0</version>
                  <executions>
                    <!--
                        Computes one Ant pathconvert property per shim (sparkXYZ.sources /
                        sparkXYZ.test.sources) listing the version-specific source directories,
                        and exports them as Maven properties (exportAntProperties=true).
                    -->
                    <execution>
                        <id>create-source-path-properties</id>
                        <goals><goal>run</goal></goals>
                        <phase>initialize</phase>
                        <configuration>
                            <exportAntProperties>true</exportAntProperties>
                            <target xmlns:ac="antlib:net.sf.antcontrib">
                                <!--
                                    Rules for adding new shim directories:
                                    ######################################
                                    1. Keep includes sorted
                                    2. Top path components such as 311until320-all should not use
                                    wildcards until a robust pattern is developed that allows
                                    refactoring for new shims without breaking build for
                                    existing shims
                                    3. Using wild cards to pick up scala and java with a single entry
                                    311until320-all/* is allowed
                                    4. Using wildcards for trivial excludes is allowed
                                    - *nondb* dirs in db shims
                                    - *until330* in 330
                                    5. Begin with a common reusable pattern, and put the unique includes
                                    and excludes at the end
                                    6. At the same pattern-nesting level list includes before excludes
                                -->

                                <!-- upstream Spark shims -->
                                <pathconvert property="spark311.sources" pathsep=",">
                                    <dirset id="spark311.dirset.id" dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset id="spark311+.pattern">
                                            <include name="311+-non330db/*"/>
                                            <include name="311+-nondb/*"/>
                                            <include name="311until320-all/*"/>
                                            <include name="311until320-noncdh/*"/>
                                            <include name="311until320-nondb/*"/>
                                            <include name="311until330-all/*"/>
                                            <include name="311until330-nondb/*"/>
                                            <include name="311until340-all/*"/>
                                            <include name="311until340-non330db/*"/>
                                            <include name="311until340-nondb/*"/>
                                            <include name="pre320-treenode/*"/>
                                        </patternset>
                                        <include name="311-nondb/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark312.sources" pathsep=",">
                                    <dirset id="spark312.dirset.id" dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset refid="spark311+.pattern"/>
                                        <include name="312-nondb/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark313.sources" pathsep=",">
                                    <dirset id="spark313.dirset.id" dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset refid="spark311+.pattern"/>
                                        <include name="313/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark314.sources" pathsep=",">
                                    <dirset id="spark314.dirset.id" dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset refid="spark311+.pattern"/>
                                        <include name="314/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark320.sources" pathsep=",">
                                    <dirset id="spark320.dirset.id" dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset id="spark320+.pattern">
                                            <!-- inherit from 311+ with exceptions -->
                                            <patternset refid="spark311+.pattern"/>
                                            <exclude name="*until320*/*"/>
                                            <exclude name="pre320-treenode/*"/>

                                            <!-- uniquely 320+ -->
                                            <include name="320+/*"/>
                                            <include name="320+-noncdh/*"/>
                                            <include name="320+-nondb/*"/>
                                            <include name="320until330-all/*"/>
                                            <include name="320until330-noncdh/*"/>
                                            <include name="320until330-nondb/*"/>
                                            <include name="320until340-all/*"/>
                                            <include name="320until340-non330db/*"/>
                                            <include name="delta-lake-common/*"/>
                                            <include name="post320-treenode/*"/>
                                        </patternset>
                                        <include name="320/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark321.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset id="spark321+.pattern">
                                            <patternset refid="spark320+.pattern"/>
                                            <include name="321+/*"/>
                                            <include name="321until330-all/*"/>
                                        </patternset>
                                        <include name="321/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark322.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset refid="spark321+.pattern"/>
                                        <include name="322/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark323.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset refid="spark321+.pattern"/>
                                        <include name="323/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark330.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset id="spark330+.pattern">
                                            <patternset refid="spark321+.pattern"/>
                                            <exclude name="*until330*/*"/>

                                            <!-- uniquely 330+ -->
                                            <include name="330+/*"/>
                                            <include name="330+-nondb/*"/>
                                            <include name="330+-noncdh/*"/>
                                            <include name="330until340/*"/>
                                            <include name="330until340-nondb/*"/>
                                        </patternset>
                                        <include name="330/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark331.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset id="spark331+.pattern">
                                            <patternset refid="spark330+.pattern"/>
                                            <include name="331+/*"/>
                                        </patternset>
                                        <include name="331/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark332.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset refid="spark331+.pattern"/>
                                        <include name="332/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark340.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset id="spark340+.pattern">
                                            <!-- inherit from 331+ with exceptions -->
                                            <patternset refid="spark331+.pattern"/>
                                            <exclude name="*until340*/*"/>
                                            <include name="340+-and-330db/*"/>
                                            <!-- uniquely 340+ -->
                                            <include name="340+/*"/>
                                        </patternset>
                                        <include name="340/*"/>
                                    </dirset>
                                </pathconvert>

                                <!-- Spark vendor shims -->
                                <pathconvert property="spark321cdh.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <!-- inherit from 321+ upstream except noncdh -->
                                        <patternset refid="spark321+.pattern"/>
                                        <exclude name="*noncdh*/*"/>

                                        <include name="321cdh/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark330cdh.sources" pathsep=",">
                                    <dirset id="spark330cdh.dirset.id" dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <!-- inherit from 330+ upstream except noncdh -->
                                        <patternset refid="spark330+.pattern"/>
                                        <exclude name="*noncdh*/*"/>

                                        <include name="330cdh/*"/>
                                    </dirset>
                                </pathconvert>

                                <!-- Databricks shims -->
                                <pathconvert property="spark312db.sources" pathsep=",">
                                    <dirset id="spark312db.dirset.id" dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <!-- inherit 311+ except nondb -->
                                        <patternset refid="spark311+.pattern"/>
                                        <exclude name="pre320-treenode/*"/>
                                        <exclude name="*nondb*/*"/>

                                        <include name="311+-db/*"/>
                                        <include name="31xdb/*"/>
                                        <include name="post320-treenode/*"/>

                                        <include name="312db/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark321db.sources" pathsep=",">
                                    <dirset id="spark321db.dirset.id" dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset id="spark321db+.pattern">
                                            <!-- inherit 321+ except nondb -->
                                            <patternset refid="spark321+.pattern"/>
                                            <exclude name="*nondb*/*"/>

                                            <include name="311+-db/*"/>
                                            <include name="321+-db/*"/>
                                        </patternset>
                                        <include name="321db/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark330db.sources" pathsep=",">
                                    <dirset id="spark330db.dirset.id" dir="${project.basedir}/src/main" erroronmissingdir="false">
                                        <patternset refid="spark321db+.pattern"/>
                                        <patternset refid="spark330+.pattern"/>
                                        <include name="340+-and-330db/*"/>
                                        <include name="330db/*"/>
                                        <exclude name="*non330db*/*"/>
                                    </dirset>
                                </pathconvert>

                                <!-- per-shim test source directories (src/test) -->
                                <pathconvert property="spark311.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="311/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark312.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="312/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark313.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="313/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark320.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="320/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark321.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="321/*"/>
                                        <include name="320+-noncdh-nondb/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark321cdh.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="321cdh/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark322.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="322/*"/>
                                        <include name="320+-noncdh-nondb/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark323.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="323/*"/>
                                        <include name="320+-noncdh-nondb/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark330.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="330/*"/>
                                        <include name="320+-noncdh-nondb/*"/>
                                        <include name="330+/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark331.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="331/*"/>
                                        <include name="320+-noncdh-nondb/*"/>
                                        <include name="330+/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark332.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="332/*"/>
                                        <include name="320+-noncdh-nondb/*"/>
                                        <include name="330+/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark330cdh.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="330cdh/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark312db.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="312db/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark321db.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="321db/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark330db.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="330db/*"/>
                                    </dirset>
                                </pathconvert>
                                <pathconvert property="spark340.test.sources" pathsep=",">
                                    <dirset dir="${project.basedir}/src/test" erroronmissingdir="false">
                                        <include name="340/*"/>
                                        <include name="320+-noncdh-nondb/*"/>
                                        <include name="330+/*"/>
                                    </dirset>
                                </pathconvert>
                            </target>
                        </configuration>
                    </execution>
                    <execution>
                        <id>generate-build-info</id>
                        <phase>generate-resources</phase>
                        <configuration>
                            <!-- Execute the shell script to generate the plugin build information. -->
                            <target name="build-info">
                                <mkdir dir="${project.build.directory}/extra-resources"/>
                                <mkdir dir="${project.build.directory}/tmp"/>
                                <exec executable="bash"
                                      output="${project.build.directory}/extra-resources/rapids4spark-version-info.properties"
                                      resultproperty="build-info.exitCode"
                                      errorproperty="build-info.errorMsg"
                                      failonerror="false">
                                    <arg value="${spark.rapids.source.basedir}/build/build-info"/>
                                    <arg value="${project.version}"/>
                                    <arg value="${spark-rapids-jni.version}"/>
                                </exec>
                                <!-- NOTE(review): the message below says "build-info.sh" but the script
                                     invoked above is build/build-info - consider aligning the names -->
                                <fail message="exec build-info.sh failed, exit code is ${build-info.exitCode}, error msg is ${build-info.errorMsg}">
                                    <condition>
                                        <not>
                                            <equals arg1="${build-info.exitCode}" arg2="0"/>
                                        </not>
                                    </condition>
                                </fail>
                            </target>
                        </configuration>

                        <goals>
                            <goal>run</goal>
                        </goals>
                    </execution>
                  </executions>
                <dependencies>
                    <dependency>
                        <groupId>org.apache.ant</groupId>
                        <artifactId>ant</artifactId>
                        <version>1.10.12</version>
                    </dependency>
                    <dependency>
                        <groupId>ant-contrib</groupId>
                        <artifactId>ant-contrib</artifactId>
                        <version>1.0b3</version>
                    </dependency>
                </dependencies>
                </plugin>
                <plugin>
                  <!-- version pin only; shading is configured where the plugin is used -->
                  <groupId>org.apache.maven.plugins</groupId>
                  <artifactId>maven-shade-plugin</artifactId>
                  <version>3.2.4</version>
                </plugin>
                <plugin>
                    <!-- version pin only -->
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-surefire-plugin</artifactId>
                    <version>2.12.4</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-compiler-plugin</artifactId>
                    <!--
                        Unbind the default Java compile executions (phase "none") so that
                        compilation is driven elsewhere - presumably by the scala-maven-plugin
                        executions declared below, which compile in process-resources /
                        process-test-resources.
                    -->
                    <executions>
                        <execution>
                            <id>default-compile</id>
                            <phase>none</phase>
                        </execution>
                        <execution>
                            <id>default-testCompile</id>
                            <phase>none</phase>
                        </execution>
                    </executions>
                </plugin>
                <plugin>
                    <groupId>net.alchim31.maven</groupId>
                    <artifactId>scala-maven-plugin</artifactId>
                    <version>${scala.plugin.version}</version>
                    <executions>
                        <!-- Make Scala source roots visible to IDE tooling -->
                        <execution>
                            <id>eclipse-add-source</id>
                            <goals>
                                <goal>add-source</goal>
                            </goals>
                        </execution>
                        <!-- Compile main sources early (process-resources), replacing the
                             default-compile execution disabled above -->
                        <execution>
                            <id>scala-compile-first</id>
                            <phase>process-resources</phase>
                            <goals>
                                <goal>compile</goal>
                            </goals>
                        </execution>
                        <execution>
                            <id>scala-test-compile-first</id>
                            <phase>process-test-resources</phase>
                            <goals>
                                <goal>testCompile</goal>
                            </goals>
                        </execution>
                        <!-- Build and attach a scaladoc jar at verify; the external-doc
                             mappings link JDK, Scala and Spark types to their online docs.
                             NOTE(review): ${java.home}/lib/rt.jar only exists on JDK 8 —
                             confirm behavior on newer JDKs -->
                        <execution>
                            <id>attach-scaladocs</id>
                            <phase>verify</phase>
                            <goals>
                                <goal>doc-jar</goal>
                            </goals>
                            <configuration>
                                <args>
                                    <arg>-doc-external-doc:${java.home}/lib/rt.jar#https://docs.oracle.com/javase/${java.major.version}/docs/api/index.html</arg>
                                    <arg>-doc-external-doc:${settings.localRepository}/${scala.local-lib.path}#https://scala-lang.org/api/${scala.version}/</arg>
                                    <arg>-doc-external-doc:${settings.localRepository}/org/apache/spark/spark-sql_${scala.binary.version}/${spark.version}/spark-sql_${scala.binary.version}-${spark.version}.jar#https://spark.apache.org/docs/${spark.version}/api/scala/index.html</arg>
                                </args>
                            </configuration>
                        </execution>
                    </executions>
                    <configuration>
                        <scalaVersion>${scala.version}</scalaVersion>
                        <!-- Fail the build if the dependency tree mixes Scala versions -->
                        <checkMultipleScalaVersions>true</checkMultipleScalaVersions>
                        <failOnMultipleScalaVersions>true</failOnMultipleScalaVersions>
                        <recompileMode>${scala.recompileMode}</recompileMode>
                        <!-- Strict compiler flags: all warnings (including unused imports
                             and missing interpolators) are fatal -->
                        <args>
                            <arg>-unchecked</arg>
                            <arg>-deprecation</arg>
                            <arg>-feature</arg>
                            <arg>-explaintypes</arg>
                            <arg>-Yno-adapted-args</arg>
                            <arg>-Ywarn-unused:imports</arg>
                            <arg>-Xlint:missing-interpolator</arg>
                            <arg>-Xfatal-warnings</arg>
                        </args>
                        <jvmArgs>
                            <jvmArg>-Xms1024m</jvmArg>
                            <jvmArg>-Xmx1024m</jvmArg>
                        </jvmArgs>
                        <!-- Extra flags passed through to javac for the Java sources -->
                        <addJavacArgs>${scala.javac.args}</addJavacArgs>
                    </configuration>
                </plugin>
                <plugin>
                    <groupId>org.scalatest</groupId>
                    <artifactId>scalatest-maven-plugin</artifactId>
                    <version>${scalatest-maven-plugin.version}</version>
                    <configuration>
                        <!-- Write JUnit-style XML into the standard surefire-reports dir so
                             CI report collectors pick it up -->
                        <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
                        <junitxml>.</junitxml>
                        <filereports>scala-test-output.txt</filereports>
                        <!-- ${argLine} is presumably populated by jacoco prepare-agent
                             (configured in <build><plugins> below) — confirm; -ea enables
                             JVM assertions during tests -->
                        <argLine>${argLine} -ea -Xmx4g -Xss4m</argLine>
                        <stderr/>
                        <systemProperties>
                            <rapids.shuffle.manager.override>${rapids.shuffle.manager.override}</rapids.shuffle.manager.override>
                            <!-- NOTE(review): presumably enables cudf reference-count leak
                                 debugging in tests — confirm against cudf docs -->
                            <ai.rapids.refcount.debug>true</ai.rapids.refcount.debug>
                            <java.awt.headless>true</java.awt.headless>
                            <java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir>
                            <!-- Quiet Spark UI/console noise; fail on unsafe memory leaks -->
                            <spark.ui.enabled>false</spark.ui.enabled>
                            <spark.ui.showConsoleProgress>false</spark.ui.showConsoleProgress>
                            <spark.unsafe.exceptionOnMemoryLeak>true</spark.unsafe.exceptionOnMemoryLeak>
                        </systemProperties>
                        <!-- Tag-based filtering, driven by build properties -->
                        <tagsToExclude>${test.exclude.tags}</tagsToExclude>
                        <tagsToInclude>${test.include.tags}</tagsToInclude>
                    </configuration>
                    <executions>
                        <execution>
                            <id>test</id>
                            <goals>
                                <goal>test</goal>
                            </goals>
                        </execution>
                    </executions>
                </plugin>
                <plugin>
                    <groupId>org.apache.rat</groupId>
                    <artifactId>apache-rat-plugin</artifactId>
                    <version>0.13</version>
                    <configuration>
                        <consoleOutput>${rat.consoleOutput}</consoleOutput>
                    </configuration>
                    <!-- License-header audit runs at verify in every inheriting module;
                         the exclude list is configured in <build><plugins> below -->
                    <executions>
                        <execution>
                            <phase>verify</phase>
                            <goals>
                                <goal>check</goal>
                            </goals>
                        </execution>
                    </executions>
                </plugin>
                <plugin>
                    <!-- Version pin only; the coverage-agent execution is configured in
                         <build><plugins> below -->
                    <groupId>org.jacoco</groupId>
                    <artifactId>jacoco-maven-plugin</artifactId>
                    <version>0.8.5</version>
                </plugin>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-jar-plugin</artifactId>
                    <version>${maven.jar.plugin.version}</version>
                    <executions>
                        <!-- Publish test classes as a jar, classified per Spark shim so
                             artifacts for different Spark versions do not collide;
                             skipIfEmpty avoids producing empty test jars -->
                        <execution>
                            <id>default-test-jar</id>
                            <goals>
                                <goal>test-jar</goal>
                            </goals>
                            <configuration>
                                <classifier>${spark.version.classifier}tests</classifier>
                                <skipIfEmpty>true</skipIfEmpty>
                            </configuration>
                        </execution>
                    </executions>
                </plugin>
                <plugin>
                    <!-- Version pin only for modules that invoke external commands -->
                    <groupId>org.codehaus.mojo</groupId>
                    <artifactId>exec-maven-plugin</artifactId>
                    <version>3.0.0</version>
                </plugin>
            </plugins>
        </pluginManagement>

        <plugins>
            <plugin>
                <groupId>org.apache.rat</groupId>
                <artifactId>apache-rat-plugin</artifactId>
                <!-- Files exempt from the license-header check: docs, generated code,
                     binary/data files, and build byproducts -->
                <configuration>
                    <excludes>
                        <exclude>**/*.md</exclude>
                        <exclude>**/*.iml</exclude>
                        <exclude>NOTICE-binary</exclude>
                        <exclude>docs/dev/idea-code-style-settings.xml</exclude>
                        <exclude>pom.xml.asc</exclude>
                        <exclude>jenkins/databricks/*.patch</exclude>
                        <exclude>*.jar</exclude>
                        <exclude>docs/demo/**/*.ipynb</exclude>
                        <exclude>docs/demo/**/*.zpln</exclude>
                        <exclude>**/src/main/resources/META-INF/services/*</exclude>
                        <exclude>**/src/test/resources/**</exclude>
                        <exclude>rmm_log.txt</exclude>
                        <exclude>dependency-reduced-pom*.xml</exclude>
                        <exclude>**/.*/**</exclude>
                        <exclude>**/src/main/java/com/nvidia/spark/rapids/format/*</exclude>
                        <exclude>**/*.csv</exclude>
                        <exclude>dist/*.txt</exclude>
                        <exclude>**/META-INF/com.nvidia.spark.rapids.SparkShimServiceProvider</exclude>
                        <!-- Apache Rat excludes target folder for projects that are included by
                        default, but there are some projects that are conditionally included.  -->
                        <exclude>**/target/**/*</exclude>
                        <exclude>**/cufile.log</exclude>
                    </excludes>
                </configuration>
            </plugin>

            <!--use this plugin to configure "spark.rapids.source.basedir" property-->
            <plugin>
                <groupId>org.commonjava.maven.plugins</groupId>
                <artifactId>directory-maven-plugin</artifactId>
                <version>0.1</version>
                <executions>
                    <!-- Runs at initialize; per the plugin's highest-basedir goal, this
                         presumably resolves the top-most project basedir in the reactor
                         into spark.rapids.source.basedir — confirm against plugin docs -->
                    <execution>
                        <id>directories</id>
                        <goals>
                            <goal>highest-basedir</goal>
                        </goals>
                        <phase>initialize</phase>
                        <configuration>
                            <property>spark.rapids.source.basedir</property>
                        </configuration>
                    </execution>
                </executions>
            </plugin>

            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-antrun-plugin</artifactId>
                <!-- parent-pom only executions -->
                <inherited>false</inherited>
                <executions>
                    <execution>
                        <!--
                        This is an alternative implementation of the scalastyle check invocation,
                        a replacement for scalastyle-maven-plugin. It's motivated to address the following:
                        - All scala files are checked at once regardless of the module, so the developer
                        can focus on addressing violations without being distracted by the build issues
                        in-between
                        - We don't have to hardcode the source code roots added dynamically by other maven
                        plugins to the project
                        - The scalastyle launch cost is amortized across all modules
                        -->
                        <id>scalastyle-all-modules</id>
                        <phase>verify</phase>
                        <goals><goal>run</goal></goals>
                        <configuration>
                            <skip>${maven.scalastyle.skip}</skip>
                            <target>
                                <!-- Collect every conventional Scala source root (including
                                     per-shim variants like src/main/<shim>/scala) into one
                                     space-separated path list -->
                                <pathconvert property="scalastyle.dirs" pathsep=" ">
                                    <dirset dir="${project.basedir}" includes="**/src/main/scala"/>
                                    <dirset dir="${project.basedir}" includes="**/src/main/*/scala"/>
                                    <dirset dir="${project.basedir}" includes="**/src/test/scala"/>
                                    <dirset dir="${project.basedir}" includes="**/src/test/*/scala"/>
                                </pathconvert>
                                <echo>Checking scalastyle for all modules using following paths:
                                    ${scalastyle.dirs}
                                </echo>
                                <!-- Single scalastyle CLI invocation over all roots;
                                     failonerror makes violations fail the build -->
                                <java classname="org.scalastyle.Main" failonerror="true">
                                    <arg line="--verbose false"/>
                                    <arg line="--warnings false"/>
                                    <arg line="--config scalastyle-config.xml"/>
                                    <arg line="--xmlOutput ${project.basedir}/target/scalastyle-output.xml"/>
                                    <arg line="--inputEncoding ${project.build.sourceEncoding}"/>
                                    <arg line="--xmlEncoding ${project.reporting.outputEncoding}"/>
                                    <arg line="${scalastyle.dirs}"/>
                                </java>
                            </target>
                        </configuration>
                    </execution>
                    <execution>
                        <id>clean-all-modules</id>
                        <phase>clean</phase>
                        <goals><goal>run</goal></goals>
                        <configuration>
                            <skip>${maven.cleanall.skip}</skip>
                            <target>
                                <!-- Find every module's target dir under the parent basedir -->
                                <dirset dir="${project.basedir}" includes="**/target" id="target.dirs.for.clean"/>
                                <pathconvert property="target.dirs.str" pathsep=" ">
                                   <dirset refid="target.dirs.for.clean"/>
                                </pathconvert>
                                <echo>Cleaning build directories of all modules ${target.dirs.str}</echo>
                                <!-- workaround ant delete does not work with dirset -->
                                <exec dir="${project.basedir}" executable="rm">
                                    <arg value="-rf"/>
                                    <arg line="${target.dirs.str}"/>
                                </exec>
                            </target>
                        </configuration>
                    </execution>
                </executions>
                <dependencies>
                    <!-- Provides org.scalastyle.Main for the <java> task above -->
                    <dependency>
                        <groupId>org.scalastyle</groupId>
                        <artifactId>scalastyle_${scala.binary.version}</artifactId>
                        <version>1.0.0</version>
                    </dependency>
                </dependencies>
            </plugin>
            <plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>build-helper-maven-plugin</artifactId>
                <!-- Registers extra per-Spark-shim source roots, supplied via the
                     spark.shim.sources / spark.shim.test.sources properties, as
                     compile and test-compile source directories -->
                <executions>
                    <execution>
                        <id>add-shim-sources</id>
                        <phase>generate-sources</phase>
                        <goals><goal>add-source</goal></goals>
                        <configuration>
                            <sources>${spark.shim.sources}</sources>
                        </configuration>
                    </execution>
                    <execution>
                        <id>add-shim-test-sources</id>
                        <phase>generate-test-sources</phase>
                        <goals><goal>add-test-source</goal></goals>
                        <configuration>
                            <sources>${spark.shim.test.sources}</sources>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.jacoco</groupId>
                <artifactId>jacoco-maven-plugin</artifactId>
                <executions>
                    <!-- Attach the coverage agent before tests run; presumably this sets
                         ${argLine}, which the scalatest configuration consumes — confirm -->
                    <execution>
                        <id>prepare-agent</id>
                        <goals>
                            <goal>prepare-agent</goal>
                        </goals>
                        <configuration>
                            <append>true</append>
                            <!-- Skip shaded (relocated third-party) classes; cover only
                                 the project's own packages -->
                            <excludes>
                                <exclude>${rapids.shade.package}.*</exclude>
                            </excludes>
                            <includes>
                                <include>ai.rapids.cudf.*</include>
                                <include>com.nvidia.spark.*</include>
                                <include>org.apache.spark.sql.rapids.*</include>
                            </includes>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
    <!-- Snapshot-only repositories (releases disabled): used to resolve
         SNAPSHOT dependencies such as unreleased Spark versions -->
    <repositories>
        <repository>
            <id>snapshots-repo</id>
            <url>https://oss.sonatype.org/content/repositories/snapshots</url>
            <releases>
                <enabled>false</enabled>
            </releases>
            <snapshots>
                <enabled>true</enabled>
            </snapshots>
        </repository>
        <repository>
            <id>apache-snapshots-repo</id>
            <url>https://repository.apache.org/content/repositories/snapshots/</url>
            <releases>
                <enabled>false</enabled>
            </releases>
            <snapshots>
                <enabled>true</enabled>
            </snapshots>
        </repository>
    </repositories>
</project>
