Commit 0c4f5f7

zhangrenhua committed
Add unit tests for creating lots of tables
1 parent 5b2d69f commit 0c4f5f7

File tree

1 file changed: +54 -0 lines changed

Lines changed: 54 additions & 0 deletions
@@ -0,0 +1,54 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hudi.utilities.functional;

import org.apache.hudi.utilities.testutils.SparkClientFunctionalTestHarnessWithHiveSupport;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

/**
 * Tests whether creating a large number of Hive tables causes a memory overflow.
 */
@Tag("functional")
public class TestCreateLargeTableProvider extends SparkClientFunctionalTestHarnessWithHiveSupport {

  // /**
  //  * Running this method locally can reproduce the memory overflow caused by the HiveClient object not being released.
  //  */
  // @Disabled
  // @Test
  // public void testHiveClientOOM() {
  //   SparkSession spark = spark();
  //   for (int i = 0; i < 10000; i++) {
  //     HiveClientUtils.newClientForMetadata(spark.sparkContext().getConf(), spark.sessionState().newHadoopConf());
  //   }
  // }

  @Disabled
  @Test
  public void createLargeHiveTable() {
    SparkSession spark = spark();
    // Repeatedly drop and recreate MOR tables so that Hive metastore client usage accumulates.
    for (int i = 0; i < 10000; i++) {
      spark.sql("drop table if exists test_" + i);
      spark.sql(String.format(
          "CREATE TABLE test_%s(wid string, csny string) using hudi tblproperties (type = 'mor', primaryKey = 'wid')", i));
    }
  }
}
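
The test methods above only reproduce the overflow by looping until the JVM runs out of memory; the commit itself does not measure or assert anything about heap usage. As a minimal sketch of how such growth could be observed, not code from this change or from the Hudi/Spark APIs, a workload like the table-creation loop can be bracketed by a coarse used-heap measurement built from standard JDK calls. The HeapGrowthSketch class name, the stand-in allocation workload, and the iteration count below are illustrative assumptions.

// Illustrative sketch only (not part of the commit): measure coarse heap growth
// around a workload such as the table-creation loop above.
import java.util.ArrayList;
import java.util.List;

public class HeapGrowthSketch {

  public static void main(String[] args) {
    // Stand-in workload; in the real test this would be the DDL loop that
    // allocates Hive client objects which are never released.
    List<byte[]> retained = new ArrayList<>();
    long growth = measureHeapGrowth(() -> {
      for (int i = 0; i < 1_000; i++) {
        retained.add(new byte[1024]); // simulates objects that stay reachable
      }
    });
    System.out.println("Approximate heap growth: " + (growth / 1024) + " KB");
  }

  // Runs the workload and returns the change in used heap bytes. GC timing makes
  // single measurements noisy, so treat the result as a rough signal only.
  private static long measureHeapGrowth(Runnable workload) {
    Runtime rt = Runtime.getRuntime();
    rt.gc(); // request (not guarantee) a collection before sampling
    long before = rt.totalMemory() - rt.freeMemory();
    workload.run();
    rt.gc();
    long after = rt.totalMemory() - rt.freeMemory();
    return after - before;
  }
}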
