diff --git a/tests/spark-it/src/test/scala/org/apache/celeborn/tests/spark/CelebornHashCheckDiskSuite.scala b/tests/spark-it/src/test/scala/org/apache/celeborn/tests/spark/CelebornHashCheckDiskSuite.scala
index 7ac2ac48c64..f762d620cfd 100644
--- a/tests/spark-it/src/test/scala/org/apache/celeborn/tests/spark/CelebornHashCheckDiskSuite.scala
+++ b/tests/spark-it/src/test/scala/org/apache/celeborn/tests/spark/CelebornHashCheckDiskSuite.scala
@@ -50,7 +50,7 @@ class CelebornHashCheckDiskSuite extends SparkTestBase {
     System.gc()
   }
 
-  test("celeborn spark integration test - hash-checkDiskFull") {
+  ignore("celeborn spark integration test - hash-checkDiskFull") {
     val sparkConf = new SparkConf().setAppName("celeborn-demo")
       .setMaster("local[2]")
       .set(s"spark.${CelebornConf.SHUFFLE_EXPIRED_CHECK_INTERVAL.key}", "20s")
diff --git a/tests/spark-it/src/test/scala/org/apache/celeborn/tests/spark/RetryReviveTest.scala b/tests/spark-it/src/test/scala/org/apache/celeborn/tests/spark/RetryReviveTest.scala
index ffb8e7721d4..51c3697d1a6 100644
--- a/tests/spark-it/src/test/scala/org/apache/celeborn/tests/spark/RetryReviveTest.scala
+++ b/tests/spark-it/src/test/scala/org/apache/celeborn/tests/spark/RetryReviveTest.scala
@@ -41,7 +41,7 @@ class RetryReviveTest extends AnyFunSuite
     System.gc()
   }
 
-  test("celeborn spark integration test - retry revive as configured times") {
+  ignore("celeborn spark integration test - retry revive as configured times") {
     setupMiniClusterWithRandomPorts()
     ShuffleClient.reset()
     val sparkConf = new SparkConf()
@@ -57,7 +57,7 @@ class RetryReviveTest extends AnyFunSuite
     ss.stop()
   }
 
-  test(
+  ignore(
     "celeborn spark integration test - e2e test retry revive with new allocated workers from RPC") {
     val testConf = Map(
       s"${CelebornConf.CLIENT_PUSH_MAX_REVIVE_TIMES.key}" -> "3",