Commit 4395cd4

Support spark3.4 (#2808)
1 parent: b00b75f

16 files changed: +1157 −12 lines

assembly/src/main/assembly/assembly.xml

Lines changed: 9 additions & 0 deletions
@@ -54,5 +54,14 @@
       <include>**/*</include>
     </includes>
   </fileSet>
+  <fileSet>
+    <directory>
+      ${project.parent.basedir}/spark-wrapper/spark-3.4/target/classes/
+    </directory>
+    <outputDirectory>resources/spark-wrapper-spark-3_4</outputDirectory>
+    <includes>
+      <include>**/*</include>
+    </includes>
+  </fileSet>
 </fileSets>
 </assembly>
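
Note: the new fileSet follows the naming scheme of the existing wrappers, bundling the compiled Spark 3.4 wrapper classes under resources/spark-wrapper-spark-3_4 inside the assembly. As a rough, illustrative sketch (not TiSpark's actual loader code), the wrapper directory name can be derived from the running Spark version like this:

    // Illustrative sketch only: map the running Spark version to the
    // bundled wrapper directory, e.g. "3.4.2" -> "spark-wrapper-spark-3_4".
    val majorMinor = org.apache.spark.SPARK_VERSION.split("\\.").take(2).mkString("_")
    val wrapperDir = s"resources/spark-wrapper-spark-$majorMinor"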

core/src/main/scala/com/pingcap/tispark/TiSparkInfo.scala

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ import org.tikv.common.exception.TiInternalException
 object TiSparkInfo {
   private final val logger = LoggerFactory.getLogger(getClass.getName)

-  val SUPPORTED_SPARK_VERSION: List[String] = "3.0" :: "3.1" :: "3.2" :: "3.3" :: Nil
+  val SUPPORTED_SPARK_VERSION: List[String] = "3.0" :: "3.1" :: "3.2" :: "3.3" :: "3.4" :: Nil

   val SPARK_VERSION: String = org.apache.spark.SPARK_VERSION

core/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/SpecialSum.scala

Lines changed: 9 additions & 1 deletion
@@ -107,7 +107,15 @@ case class SpecialSum(child: Expression, retType: DataType, initVal: Any)
   override def dataType: DataType = resultType

   override def checkInputDataTypes(): TypeCheckResult =
-    TypeUtils.checkForNumericExpr(child.dataType, "function sum")
+    checkForNumericExpr(child.dataType, "function sum")
+
+  def checkForNumericExpr(dt: DataType, caller: String): TypeCheckResult = {
+    if (dt.isInstanceOf[NumericType] || dt == NullType) {
+      TypeCheckResult.TypeCheckSuccess
+    } else {
+      TypeCheckResult.TypeCheckFailure(s"$caller requires numeric types, not ${dt.catalogString}")
+    }
+  }

 /**
  * The implement is same as the [[org.apache.spark.sql.catalyst.expressions.aggregate.Sum]]
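
Note: the check previously delegated to catalyst's TypeUtils.checkForNumericExpr, which appears to no longer be available in the same form in Spark 3.4, so an equivalent check is inlined. Its behavior, shown with illustrative inputs:

    import org.apache.spark.sql.types.{IntegerType, NullType, StringType}

    checkForNumericExpr(IntegerType, "function sum") // TypeCheckSuccess
    checkForNumericExpr(NullType, "function sum")    // TypeCheckSuccess (NULL input is tolerated)
    checkForNumericExpr(StringType, "function sum")
    // TypeCheckFailure("function sum requires numeric types, not string")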

core/src/test/scala/com/pingcap/tispark/auth/TiAuthIntegrationSuite.scala

Lines changed: 8 additions & 2 deletions
@@ -99,7 +99,10 @@ class TiAuthIntegrationSuite extends SharedSQLContext {
     exception should not have message(
       s"DELETE command denied to user `$user`@% for table default.`$hive_table`")
     val errorMessage = exception.getMessage
-    assert(errorMessage.contains(s"DELETE is only supported with v2 tables."))
+    assert(
+      errorMessage.contains(s"DELETE is only supported with v2 tables.") ||
+        // For Spark 3.4, which is fixed in 3.5
+        errorMessage.contains("[INTERNAL_ERROR] Unexpected table relation: HiveTableRelation"))

     spark.sql(s"DROP TABLE IF EXISTS `$hive_table`")
   }

@@ -156,7 +159,10 @@ class TiAuthIntegrationSuite extends SharedSQLContext {
       spark.sql(s"select * from $table")
     }
     // validateCatalog has been set namespace with "use tidb_catalog.$dbPrefix$dummyDatabase" in beforeAll() method
-    assert(caught.getMessage.contains(s"Table or view not found: $table"))
+    assert(
+      caught.getMessage.contains(s"Table or view not found: $table") ||
+        // For Spark 3.4
+        caught.getMessage.contains(s"The table or view `$table` cannot be found"))
   }

   test(f"Show databases without privilege should not contains db")
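
Note: Spark 3.4 reworded several AnalysisException messages (for example, "Table or view not found: t" became "The table or view `t` cannot be found"), so the assertions here and in the suites below accept either wording. A hypothetical helper that could consolidate the repeated pattern (not part of this commit):

    // Hypothetical helper, not in this commit: accept both the pre-3.4
    // and the 3.4 wording of Spark's "table not found" analysis error.
    def assertTableNotFound(message: String, table: String): Unit =
      assert(
        message.contains(s"Table or view not found: $table") ||
          message.contains(s"The table or view `$table` cannot be found"),
        s"unexpected error message: $message")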

core/src/test/scala/com/pingcap/tispark/datasource/BaseDataSourceTest.scala

Lines changed: 4 additions & 4 deletions
@@ -89,15 +89,15 @@ class BaseDataSourceTest(val table: String, val database: String = "tispark_test
       this.jdbcWrite(data, schema)
     }
     assert(
-      caughtJDBC.getCause.getClass.equals(jdbcErrorClass),
-      s"${caughtJDBC.getCause.getClass.getName} not equals to ${jdbcErrorClass.getName}")
+      jdbcErrorClass.isAssignableFrom(caughtJDBC.getCause.getClass),
+      s"${jdbcErrorClass.getName} not assignable from ${caughtJDBC.getCause.getClass.getName}")

     val caughtTiDB = intercept[SparkException] {
       this.tidbWrite(data, schema)
     }
     assert(
-      caughtTiDB.getCause.getClass.equals(tidbErrorClass),
-      s"${caughtTiDB.getCause.getClass.getName} not equals to ${tidbErrorClass.getName}")
+      tidbErrorClass.isAssignableFrom(caughtTiDB.getCause.getClass),
+      s"${tidbErrorClass.getName} not assignable from ${caughtTiDB.getCause.getClass.getName}")

     if (tidbErrorMsg != null) {
       if (!msgStartWith) {
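
Note: Class.equals requires the cause to be exactly the expected class, while isAssignableFrom also accepts subclasses, which keeps these assertions stable when a newer Spark throws a more specific exception type. A minimal illustration:

    // classOf[RuntimeException] is not equal to classOf[IllegalStateException],
    // but IllegalStateException IS-A RuntimeException, so isAssignableFrom holds.
    val expected = classOf[RuntimeException]
    val actual = new IllegalStateException("boom").getClass

    expected.equals(actual)           // false: different classes
    expected.isAssignableFrom(actual) // true: subclass is acceptable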

core/src/test/scala/com/pingcap/tispark/datasource/ExceptionTestSuite.scala

Lines changed: 3 additions & 1 deletion
@@ -33,7 +33,9 @@ class ExceptionTestSuite extends BaseBatchWriteTest("test_datasource_exception_t
       tidbWrite(List(row1, row2), schema)
     }
     assert(
-      caught.getMessage.contains(s"Table or view '$table' not found in database '$database'"))
+      caught.getMessage.contains(s"Table or view '$table' not found in database '$database'") ||
+        // For Spark 3.4
+        caught.getMessage.contains(s"The table or view `$database`.`$table` cannot be found"))
   }

   test("Test column does not exist") {

core/src/test/scala/org/apache/spark/sql/SparkDataTypeTestSuite.scala

Lines changed: 2 additions & 2 deletions
@@ -29,9 +29,9 @@ class SparkDataTypeTestSuite extends BaseTiSparkTest {
   test("double type test") {
     compSparkWithTiDB(
       qSpark =
-        "select id_dt, tp_double, tp_float, tp_real from full_data_type_table order by tp_double desc nulls first limit 10",
+        "select id_dt, tp_double, tp_float, tp_real from full_data_type_table order by tp_double desc nulls first, id_dt asc limit 10",
       qTiDB =
-        "select id_dt, tp_double, tp_float, tp_real from full_data_type_table order by tp_double is null desc, tp_double desc limit 10")
+        "select id_dt, tp_double, tp_float, tp_real from full_data_type_table order by tp_double is null desc, tp_double desc, id_dt asc limit 10")
   }

   test("decimal type test") {

core/src/test/scala/org/apache/spark/sql/staleread/StaleReadSuite.scala

Lines changed: 4 additions & 1 deletion
@@ -93,7 +93,10 @@ class StaleReadSuite extends BaseTiSparkTest {
     val caught = intercept[org.apache.spark.sql.AnalysisException] {
       spark.sql(s"select count(*) from $table").collect()
     }
-    caught.getMessage() should include("Table or view not found")
+    assert(
+      caught.getMessage().contains("Table or view not found") ||
+        // For spark 3.4
+        caught.getMessage().contains(s"The table or view `$table` cannot be found"))

     spark.conf.set(TiConfigConst.STALE_READ, t1)
     assert(1 == spark.sql(s"select * from $table").schema.fields.length)

pom.xml

Lines changed: 13 additions & 0 deletions
@@ -77,6 +77,7 @@
     <spark3_1.version>3.1.3</spark3_1.version>
     <spark3_2.version>3.2.3</spark3_2.version>
     <spark3_3.version>3.3.1</spark3_3.version>
+    <spark3_4.version>3.4.2</spark3_4.version>
     <scala.binary.version>2.12</scala.binary.version>
     <scala.version>2.12.10</scala.version>
     <scala.version.release>2.12</scala.version.release>

@@ -175,6 +176,7 @@
     <module>spark-wrapper/spark-3.1</module>
     <module>spark-wrapper/spark-3.2</module>
     <module>spark-wrapper/spark-3.3</module>
+    <module>spark-wrapper/spark-3.4</module>
     <module>assembly</module>
   </modules>

@@ -212,6 +214,17 @@
       <spark.version.release>3.3</spark.version.release>
     </properties>
   </profile>
+  <profile>
+    <id>spark-3.4</id>
+    <activation>
+      <activeByDefault>false</activeByDefault>
+    </activation>
+    <properties>
+      <spark.version.compile>${spark3_4.version}</spark.version.compile>
+      <spark.version.test>${spark3_4.version}</spark.version.test>
+      <spark.version.release>3.4</spark.version.release>
+    </properties>
+  </profile>
   <profile>
     <id>jenkins</id>
     <modules>
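
Note: with the profile in place, a Spark 3.4 build can presumably be selected the standard Maven way, for example:

    mvn clean package -Pspark-3.4 -Dmaven.test.skip=true

Since the profile is not activeByDefault, builds that pass no -P flag keep the previous default Spark version.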

spark-wrapper/spark-3.4/pom.xml

Lines changed: 129 additions & 0 deletions
@@ -0,0 +1,129 @@ (new file; entire content added)
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>com.pingcap.tispark</groupId>
    <artifactId>tispark-parent</artifactId>
    <version>3.3.0</version>
    <relativePath>../../pom.xml</relativePath>
  </parent>

  <artifactId>spark-wrapper-spark-3.4_${scala.version.release}</artifactId>
  <packaging>jar</packaging>
  <name>TiSpark Project Spark Wrapper Spark-3.4</name>
  <url>http://github.com/pingcap/tispark</url>

  <properties>
    <spark.version.wrapper>${spark3_4.version}</spark.version.wrapper>
  </properties>

  <dependencies>
    <dependency>
      <groupId>com.pingcap.tispark</groupId>
      <artifactId>tispark-core-internal</artifactId>
      <version>${project.parent.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_2.12</artifactId>
      <version>${spark.version.wrapper}</version>
      <exclusions>
        <exclusion>
          <groupId>org.apache.logging.log4j</groupId>
          <artifactId>log4j-slf4j-impl</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-catalyst_2.12</artifactId>
      <version>${spark.version.wrapper}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_2.12</artifactId>
      <version>${spark.version.wrapper}</version>
    </dependency>
  </dependencies>

  <build>
    <sourceDirectory>src/main/scala</sourceDirectory>
    <plugins>
      <plugin>
        <groupId>net.alchim31.maven</groupId>
        <artifactId>scala-maven-plugin</artifactId>
        <version>4.3.0</version>
        <executions>
          <execution>
            <id>compile-scala</id>
            <phase>compile</phase>
            <goals>
              <goal>add-source</goal>
              <goal>compile</goal>
            </goals>
          </execution>
          <execution>
            <id>test-compile-scala</id>
            <phase>test-compile</phase>
            <goals>
              <goal>add-source</goal>
              <goal>testCompile</goal>
            </goals>
          </execution>
          <execution>
            <id>attach-javadocs</id>
            <goals>
              <goal>doc-jar</goal>
            </goals>
          </execution>
        </executions>
        <configuration>
          <scalaVersion>${scala.version}</scalaVersion>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>2.3.2</version>
        <configuration>
          <source>1.8</source>
          <target>1.8</target>
          <encoding>UTF-8</encoding>
          <showWarnings>true</showWarnings>
          <showDeprecation>true</showDeprecation>
        </configuration>
      </plugin>
      <!-- Source Plug-in -->
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-source-plugin</artifactId>
        <version>3.0.1</version>
        <executions>
          <execution>
            <id>attach-sources</id>
            <goals>
              <goal>jar-no-fork</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
      <!-- Scala Format Plug-in -->
      <plugin>
        <groupId>org.antipathy</groupId>
        <artifactId>mvn-scalafmt_${scala.binary.version}</artifactId>
        <version>1.0.3</version>
        <configuration>
          <skipSources>${scalafmt.skip}</skipSources>
          <skipTestSources>${scalafmt.skip}</skipTestSources>
          <sourceDirectories> <!-- (Optional) Paths to source-directories. Overrides ${project.build.sourceDirectory} -->
            <sourceDirectory>${project.basedir}/src/main/scala</sourceDirectory>
          </sourceDirectories>
          <testSourceDirectories> <!-- (Optional) Paths to test-source-directories. Overrides ${project.build.testSourceDirectory} -->
            <param>${project.basedir}/src/test/scala</param>
          </testSourceDirectories>
        </configuration>
      </plugin>
    </plugins>
  </build>
</project>
