
Commit 196686d

update
1 parent 80ffbf1 commit 196686d

File tree: 8 files changed, +100 -0 lines changed


gluten-ut/spark32/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala (+1)

@@ -245,6 +245,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
     .excludeGlutenTest("distributeBy and localSort")
     .excludeGlutenTest("describe")
     .excludeGlutenTest("Allow leading/trailing whitespace in string before casting")
+    .excludeGlutenTest("SPARK-27439: Explain result should match collected result after view change - re-write")
   enableSuite[GlutenDataFrameTimeWindowingSuite]
     .exclude("simple tumbling window with record at window start")
     .exclude("SPARK-21590: tumbling window using negative start time")

gluten-ut/spark32/src/test/scala/org/apache/spark/sql/GlutenDataFrameSuite.scala (+24)

@@ -28,6 +28,7 @@ import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SQLTestData.TestData2
 import org.apache.spark.sql.types.StringType
 
+import java.io.ByteArrayOutputStream
 import java.nio.charset.StandardCharsets
 
 import scala.util.Random
@@ -359,6 +360,29 @@ class GlutenDataFrameSuite extends DataFrameSuite with GlutenSQLTestsTrait {
     checkResult(df, expectedBinaryResult)
   }
 
+  test("SPARK-27439: Explain result should match collected result after view change - re-write") {
+    withTempView("test", "test2", "tmp") {
+      spark.range(10).createOrReplaceTempView("test")
+      spark.range(5).createOrReplaceTempView("test2")
+      spark.sql("select * from test").createOrReplaceTempView("tmp")
+      val df = spark.sql("select * from tmp")
+      spark.sql("select * from test2").createOrReplaceTempView("tmp")
+
+      val captured = new ByteArrayOutputStream()
+      Console.withOut(captured) {
+        df.explain(extended = true)
+      }
+      checkAnswer(df, spark.range(10).toDF)
+      val output = captured.toString
+      assert(output.contains("""== Parsed Logical Plan ==
+                               |'Project [*]
+                               |+- 'UnresolvedRelation [tmp]""".stripMargin))
+      assert(output.contains("""== Physical Plan ==
+                               |*(1) ColumnarToRow
+                               |+- ColumnarRange 0, 10, 1, 2, 10, [id#0L]""".stripMargin))
+    }
+  }
+
   private def withExpr(newExpr: Expression): Column = new Column(newExpr)
 
   def equalizer(expr: Expression, other: Any): Column = withExpr {
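The added test re-writes Spark's SPARK-27439 case for Gluten: it captures the output of df.explain(extended = true) by redirecting Console to a ByteArrayOutputStream, then asserts that both the collected rows and the explained plan still reflect the original "tmp" view (10 rows) after the view has been re-pointed, with the physical plan expressed in Gluten's columnar operators (ColumnarToRow over ColumnarRange) rather than vanilla Spark's Range. A standalone sketch of the same capture pattern, outside the Gluten test harness and assuming a local SparkSession (the object name and assertions below are illustrative, not part of the commit):

// Sketch: capture explain() output, which goes to stdout, by redirecting Console.
import java.io.ByteArrayOutputStream
import org.apache.spark.sql.SparkSession

object CapturedExplainSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[2]").appName("explain-capture").getOrCreate()
    spark.range(10).createOrReplaceTempView("test")
    spark.sql("select * from test").createOrReplaceTempView("tmp")
    val df = spark.sql("select * from tmp") // analyzed eagerly, so "tmp" is resolved here

    // Re-pointing "tmp" at another relation must not change what `df` returns
    // or what its explain output shows.
    spark.range(5).createOrReplaceTempView("test2")
    spark.sql("select * from test2").createOrReplaceTempView("tmp")

    // explain() prints via Console.out, so Console.withOut captures it as text.
    val captured = new ByteArrayOutputStream()
    Console.withOut(captured) {
      df.explain(extended = true)
    }
    val output = captured.toString
    assert(output.contains("== Parsed Logical Plan ==")) // backend-agnostic check
    assert(df.count() == 10) // still reads the original 10-row view
    spark.stop()
  }
}

The exact physical-plan text differs by backend, which is why the Gluten re-write pins ColumnarToRow/ColumnarRange while the sketch above only checks the plan header.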

gluten-ut/spark33/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala (+1)

@@ -267,6 +267,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
     .excludeGlutenTest("distributeBy and localSort")
     .excludeGlutenTest("describe")
     .excludeGlutenTest("Allow leading/trailing whitespace in string before casting")
+    .excludeGlutenTest("SPARK-27439: Explain result should match collected result after view change - re-write")
   enableSuite[GlutenDataFrameTimeWindowingSuite]
     .exclude("simple tumbling window with record at window start")
     .exclude("SPARK-21590: tumbling window using negative start time")

gluten-ut/spark33/src/test/scala/org/apache/spark/sql/GlutenDataFrameSuite.scala (+24)

@@ -28,6 +28,7 @@ import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SQLTestData.TestData2
 import org.apache.spark.sql.types.StringType
 
+import java.io.ByteArrayOutputStream
 import java.nio.charset.StandardCharsets
 
 import scala.util.Random
@@ -360,6 +361,29 @@ class GlutenDataFrameSuite extends DataFrameSuite with GlutenSQLTestsTrait {
     checkResult(df, expectedBinaryResult)
   }
 
+  test("SPARK-27439: Explain result should match collected result after view change - re-write") {
+    withTempView("test", "test2", "tmp") {
+      spark.range(10).createOrReplaceTempView("test")
+      spark.range(5).createOrReplaceTempView("test2")
+      spark.sql("select * from test").createOrReplaceTempView("tmp")
+      val df = spark.sql("select * from tmp")
+      spark.sql("select * from test2").createOrReplaceTempView("tmp")
+
+      val captured = new ByteArrayOutputStream()
+      Console.withOut(captured) {
+        df.explain(extended = true)
+      }
+      checkAnswer(df, spark.range(10).toDF)
+      val output = captured.toString
+      assert(output.contains("""== Parsed Logical Plan ==
+                               |'Project [*]
+                               |+- 'UnresolvedRelation [tmp]""".stripMargin))
+      assert(output.contains("""== Physical Plan ==
+                               |*(1) ColumnarToRow
+                               |+- ColumnarRange 0, 10, 1, 2, 10, [id#0L]""".stripMargin))
+    }
+  }
+
   private def withExpr(newExpr: Expression): Column = new Column(newExpr)
 
   def equalizer(expr: Expression, other: Any): Column = withExpr {

gluten-ut/spark34/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala (+1)

@@ -265,6 +265,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
     .excludeGlutenTest("distributeBy and localSort")
     .excludeGlutenTest("describe")
     .excludeGlutenTest("Allow leading/trailing whitespace in string before casting")
+    .excludeGlutenTest("SPARK-27439: Explain result should match collected result after view change - re-write")
   enableSuite[GlutenDataFrameTimeWindowingSuite]
     .exclude("simple tumbling window with record at window start")
     .exclude("SPARK-21590: tumbling window using negative start time")

gluten-ut/spark34/src/test/scala/org/apache/spark/sql/GlutenDataFrameSuite.scala (+24)

@@ -28,6 +28,7 @@ import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SQLTestData.TestData2
 import org.apache.spark.sql.types.StringType
 
+import java.io.ByteArrayOutputStream
 import java.nio.charset.StandardCharsets
 
 import scala.util.Random
@@ -360,6 +361,29 @@ class GlutenDataFrameSuite extends DataFrameSuite with GlutenSQLTestsTrait {
     checkResult(df, expectedBinaryResult)
   }
 
+  test("SPARK-27439: Explain result should match collected result after view change - re-write") {
+    withTempView("test", "test2", "tmp") {
+      spark.range(10).createOrReplaceTempView("test")
+      spark.range(5).createOrReplaceTempView("test2")
+      spark.sql("select * from test").createOrReplaceTempView("tmp")
+      val df = spark.sql("select * from tmp")
+      spark.sql("select * from test2").createOrReplaceTempView("tmp")
+
+      val captured = new ByteArrayOutputStream()
+      Console.withOut(captured) {
+        df.explain(extended = true)
+      }
+      checkAnswer(df, spark.range(10).toDF)
+      val output = captured.toString
+      assert(output.contains("""== Parsed Logical Plan ==
+                               |'Project [*]
+                               |+- 'UnresolvedRelation [tmp]""".stripMargin))
+      assert(output.contains("""== Physical Plan ==
+                               |*(1) ColumnarToRow
+                               |+- ColumnarRange 0, 10, 1, 2, 10, [id#0L]""".stripMargin))
+    }
+  }
+
   private def withExpr(newExpr: Expression): Column = new Column(newExpr)
 
   def equalizer(expr: Expression, other: Any): Column = withExpr {

gluten-ut/spark35/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala (+1)

@@ -265,6 +265,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
     .excludeGlutenTest("distributeBy and localSort")
     .excludeGlutenTest("describe")
     .excludeGlutenTest("Allow leading/trailing whitespace in string before casting")
+    .excludeGlutenTest("SPARK-27439: Explain result should match collected result after view change - re-write")
   enableSuite[GlutenDataFrameTimeWindowingSuite]
     .exclude("simple tumbling window with record at window start")
     .exclude("SPARK-21590: tumbling window using negative start time")

gluten-ut/spark35/src/test/scala/org/apache/spark/sql/GlutenDataFrameSuite.scala (+24)

@@ -29,6 +29,7 @@ import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SQLTestData.TestData2
 import org.apache.spark.sql.types.StringType
 
+import java.io.ByteArrayOutputStream
 import java.nio.charset.StandardCharsets
 
 import scala.util.Random
@@ -372,6 +373,29 @@ class GlutenDataFrameSuite extends DataFrameSuite with GlutenSQLTestsTrait {
     }
   }
 
+  test("SPARK-27439: Explain result should match collected result after view change - re-write") {
+    withTempView("test", "test2", "tmp") {
+      spark.range(10).createOrReplaceTempView("test")
+      spark.range(5).createOrReplaceTempView("test2")
+      spark.sql("select * from test").createOrReplaceTempView("tmp")
+      val df = spark.sql("select * from tmp")
+      spark.sql("select * from test2").createOrReplaceTempView("tmp")
+
+      val captured = new ByteArrayOutputStream()
+      Console.withOut(captured) {
+        df.explain(extended = true)
+      }
+      checkAnswer(df, spark.range(10).toDF)
+      val output = captured.toString
+      assert(output.contains("""== Parsed Logical Plan ==
+                               |'Project [*]
+                               |+- 'UnresolvedRelation [tmp]""".stripMargin))
+      assert(output.contains("""== Physical Plan ==
+                               |*(1) ColumnarToRow
+                               |+- ColumnarRange 0, 10, 1, 2, 10, [id#0L]""".stripMargin))
+    }
+  }
+
   private def withExpr(newExpr: Expression): Column = new Column(newExpr)
 
   def equalizer(expr: Expression, other: Any): Column = withExpr {
