Commit dab39fb

resolve comments
1 parent e6bdb53 commit dab39fb

File tree: 3 files changed (+11, −21 lines)

connector/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/v2/join/OracleJoinPushdownIntegrationSuite.scala

Lines changed: 0 additions & 12 deletions

@@ -20,8 +20,6 @@ package org.apache.spark.sql.jdbc.v2.join
 import java.sql.Connection
 import java.util.Locale
 
-import org.apache.spark.SparkConf
-import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
 import org.apache.spark.sql.jdbc.{DockerJDBCIntegrationSuite, JdbcDialect, OracleDatabaseOnDocker, OracleDialect}
 import org.apache.spark.sql.jdbc.v2.JDBCV2JoinPushdownIntegrationSuiteBase
 import org.apache.spark.tags.DockerTest
@@ -57,22 +55,12 @@ import org.apache.spark.tags.DockerTest
 class OracleJoinPushdownIntegrationSuite
   extends DockerJDBCIntegrationSuite
   with JDBCV2JoinPushdownIntegrationSuiteBase {
-  override val catalogName: String = "oracle"
-
   override val namespaceOpt: Option[String] = Some("SYSTEM")
 
   override val db = new OracleDatabaseOnDocker
 
   override val url = db.getJdbcUrl(dockerIp, externalPort)
 
-  override def sparkConf: SparkConf = super.sparkConf
-    .set(s"spark.sql.catalog.$catalogName", classOf[JDBCTableCatalog].getName)
-    .set(s"spark.sql.catalog.$catalogName.url", url)
-    .set(s"spark.sql.catalog.$catalogName.pushDownJoin", "true")
-    .set(s"spark.sql.catalog.$catalogName.pushDownAggregate", "true")
-    .set(s"spark.sql.catalog.$catalogName.pushDownLimit", "true")
-    .set(s"spark.sql.catalog.$catalogName.pushDownOffset", "true")
-
   override val jdbcDialect: JdbcDialect = OracleDialect()
 
   override def caseConvert(tableName: String): String = tableName.toUpperCase(Locale.ROOT)
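
Net effect: the Oracle suite no longer wires up the JDBC catalog itself. Reconstructed from the context lines above (a sketch of the members visible in this diff only, not the full file), the class now reduces to:

class OracleJoinPushdownIntegrationSuite
  extends DockerJDBCIntegrationSuite
  with JDBCV2JoinPushdownIntegrationSuiteBase {
  // Catalog registration and the pushDown* options now come from the
  // inherited sparkConf in JDBCV2JoinPushdownIntegrationSuiteBase.
  override val namespaceOpt: Option[String] = Some("SYSTEM")

  override val db = new OracleDatabaseOnDocker

  override val url = db.getJdbcUrl(dockerIp, externalPort)

  override val jdbcDialect: JdbcDialect = OracleDialect()

  override def caseConvert(tableName: String): String = tableName.toUpperCase(Locale.ROOT)
}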

sql/core/src/test/scala/org/apache/spark/sql/jdbc/v2/JDBCV2JoinPushdownIntegrationSuiteBase.scala

Lines changed: 11 additions & 1 deletion

@@ -20,9 +20,11 @@ package org.apache.spark.sql.jdbc.v2
 import java.sql.{Connection, DriverManager}
 import java.util.Properties
 
+import org.apache.spark.SparkConf
 import org.apache.spark.sql.QueryTest
 import org.apache.spark.sql.connector.DataSourcePushdownTestUtils
 import org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils
+import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.jdbc.JdbcDialect
 import org.apache.spark.sql.test.SharedSparkSession
@@ -32,7 +34,7 @@ trait JDBCV2JoinPushdownIntegrationSuiteBase
   extends QueryTest
   with SharedSparkSession
   with DataSourcePushdownTestUtils {
-  val catalogName: String
+  val catalogName: String = "join_pushdown_catalog"
   val namespaceOpt: Option[String] = None
   val url: String
 
@@ -41,6 +43,14 @@ trait JDBCV2JoinPushdownIntegrationSuiteBase
 
   val jdbcDialect: JdbcDialect
 
+  override def sparkConf: SparkConf = super.sparkConf
+    .set(s"spark.sql.catalog.$catalogName", classOf[JDBCTableCatalog].getName)
+    .set(s"spark.sql.catalog.$catalogName.url", url)
+    .set(s"spark.sql.catalog.$catalogName.pushDownJoin", "true")
+    .set(s"spark.sql.catalog.$catalogName.pushDownAggregate", "true")
+    .set(s"spark.sql.catalog.$catalogName.pushDownLimit", "true")
+    .set(s"spark.sql.catalog.$catalogName.pushDownOffset", "true")
+
   private def catalogAndNamespace =
     namespaceOpt.map(namespace => s"$catalogName.$namespace").getOrElse(catalogName)
 
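
With the catalog wiring centralized in the base trait, a concrete suite only supplies its connection-specific pieces; catalogName now defaults to "join_pushdown_catalog" unless overridden. A minimal sketch of what a new suite might look like under this trait — the class name and URL are hypothetical, and it assumes url and jdbcDialect are the only abstract members left (only those two are visible in this diff; the real trait may require more):

import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.jdbc.{H2Dialect, JdbcDialect}

// Hypothetical suite; the inherited sparkConf registers this URL under
// spark.sql.catalog.join_pushdown_catalog.url and enables the pushDown*
// options automatically.
class InMemoryH2JoinPushdownSuite
  extends QueryTest
  with JDBCV2JoinPushdownIntegrationSuiteBase {
  override val url = "jdbc:h2:mem:join_pushdown;DB_CLOSE_DELAY=-1"  // illustrative URL

  override val jdbcDialect: JdbcDialect = H2Dialect()
}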

sql/core/src/test/scala/org/apache/spark/sql/jdbc/v2/JDBCV2JoinPushdownSuite.scala

Lines changed: 0 additions & 8 deletions

@@ -22,7 +22,6 @@ import java.sql.Connection
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.{ExplainSuiteHelper, QueryTest}
 import org.apache.spark.sql.connector.DataSourcePushdownTestUtils
-import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
 import org.apache.spark.sql.jdbc.{H2Dialect, JdbcDialect}
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.util.Utils
@@ -36,19 +35,12 @@ class JDBCV2JoinPushdownSuite
   val tempDir = Utils.createTempDir()
   override val url = s"jdbc:h2:${tempDir.getCanonicalPath};user=testUser;password=testPass"
 
-  override val catalogName: String = "h2"
   override val namespaceOpt: Option[String] = Some("test")
 
   override val jdbcDialect: JdbcDialect = H2Dialect()
 
   override def sparkConf: SparkConf = super.sparkConf
-    .set("spark.sql.catalog.h2", classOf[JDBCTableCatalog].getName)
-    .set("spark.sql.catalog.h2.url", url)
     .set("spark.sql.catalog.h2.driver", "org.h2.Driver")
-    .set("spark.sql.catalog.h2.pushDownAggregate", "true")
-    .set("spark.sql.catalog.h2.pushDownLimit", "true")
-    .set("spark.sql.catalog.h2.pushDownOffset", "true")
-    .set("spark.sql.catalog.h2.pushDownJoin", "true")
 
   override def qualifyTableName(tableName: String): String = namespaceOpt
     .map(namespace => s""""$namespace"."$tableName"""").getOrElse(s""""$tableName"""")
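
Since this suite no longer overrides catalogName, it inherits the default "join_pushdown_catalog" from the base trait, and the inherited catalog registration and pushDown* options are keyed under that name (note the driver option in the hunk above is still set under the old spark.sql.catalog.h2 prefix). Tests then reach tables through the base trait's catalogAndNamespace prefix; a rough sketch, where the table and column names are hypothetical:

// catalogAndNamespace = s"$catalogName.$namespace" = "join_pushdown_catalog.test"
// for this suite (namespaceOpt = Some("test")); "employees"/"id" are invented.
val joined = spark.sql(
  """SELECT a.id FROM join_pushdown_catalog.test.employees a
    |JOIN join_pushdown_catalog.test.employees b ON a.id = b.id""".stripMargin)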
