Skip to content

Commit 826dee5

Browse files
committed
[HUDI-9678][TEST][FOLLOWUP] Add a unit test to cover cases that insert table with hive supported SparkSession
1 parent 6971558 commit 826dee5

File tree

2 files changed

+51
-1
lines changed

2 files changed

+51
-1
lines changed

hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/common/HoodieSparkSqlTestBase.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ class HoodieSparkSqlTestBase extends FunSuite with BeforeAndAfterAll {
5959
org.apache.log4j.Logger.getRootLogger.setLevel(org.apache.log4j.Level.WARN)
6060
private val LOG = LoggerFactory.getLogger(getClass)
6161

62-
// Visible to subclasses (`protected`) so hive-enabled test suites can point
// their own SparkSession at the same throwaway warehouse location.
protected lazy val sparkWareHouse = {
  // Create-then-delete yields a unique, not-yet-existing path; Spark creates
  // the warehouse directory itself on first use.
  val warehouseDir = Utils.createTempDir()
  Utils.deleteRecursively(warehouseDir)
  warehouseDir
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing,
13+
* software distributed under the License is distributed on an
14+
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15+
* KIND, either express or implied. See the License for the
16+
* specific language governing permissions and limitations
17+
* under the License.
18+
*/
19+
20+
package org.apache.spark.sql.hudi.dml.insert
21+
22+
import org.apache.hudi.sync.common.HoodieSyncTool
23+
24+
import org.apache.hadoop.conf.Configuration
25+
import org.apache.spark.sql.SparkSession
26+
27+
import java.util.Properties
28+
29+
/**
 * Re-runs the insert-table suite against a Hive-enabled SparkSession.
 *
 * Hive/meta sync stays disabled and a no-op sync tool is wired in, so the
 * tests exercise the hive-aware session code path without contacting any
 * external Hive service.
 */
class TestInsertTableWithHiveSupport extends TestInsertTable {

  override lazy val spark: SparkSession = {
    // Per-suite settings, evaluated lazily so the warehouse dir is only
    // materialized when the session is first requested. Order matters:
    // these are applied before the suite-wide sparkConf(), matching the
    // base suite's builder ordering (later config calls win).
    val sessionOptions = Seq(
      "spark.sql.warehouse.dir" -> sparkWareHouse.getCanonicalPath,
      "spark.sql.session.timeZone" -> "UTC",
      "hoodie.insert.shuffle.parallelism" -> "4",
      "hoodie.upsert.shuffle.parallelism" -> "4",
      "hoodie.delete.shuffle.parallelism" -> "4",
      "hoodie.datasource.hive_sync.enable" -> "false",
      "hoodie.datasource.meta.sync.enable" -> "false",
      "hoodie.meta.sync.client.tool.class" -> classOf[DummySyncTool].getName
    )
    sessionOptions
      .foldLeft(SparkSession.builder()) { case (builder, (key, value)) => builder.config(key, value) }
      .config(sparkConf()) // suite-wide conf applied last, as in the base suite
      .enableHiveSupport()
      .getOrCreate()
  }
}
45+
46+
/**
 * Sync-tool stub for tests: satisfies the `hoodie.meta.sync.client.tool.class`
 * contract without ever talking to a real metastore.
 *
 * @param props      sync properties passed through to [[HoodieSyncTool]]
 * @param hadoopConf Hadoop configuration passed through to [[HoodieSyncTool]]
 */
class DummySyncTool(props: Properties, hadoopConf: Configuration) extends HoodieSyncTool(props, hadoopConf) {
  /** Intentionally a no-op — tests do not perform any table sync. */
  override def syncHoodieTable(): Unit = ()
}

0 commit comments

Comments
 (0)