
Commit bf04df8

fix
1 parent e86e2c5 commit bf04df8

2 files changed: +2 -3 lines changed

backends-clickhouse/src-delta-32/test/scala/org/apache/spark/gluten/delta/GlutenDeltaParquetDeletionVectorSuite.scala

-2

@@ -16,7 +16,6 @@
  */
 package org.apache.spark.gluten.delta
 
-import org.apache.gluten.config.GlutenConfig
 import org.apache.gluten.execution.{FileSourceScanExecTransformer, GlutenClickHouseTPCHAbstractSuite}
 
 import org.apache.spark.SparkConf
@@ -47,7 +46,6 @@ class GlutenDeltaParquetDeletionVectorSuite
       .set("spark.sql.autoBroadcastJoinThreshold", "10MB")
       .set("spark.sql.adaptive.enabled", "true")
       .set("spark.sql.files.maxPartitionBytes", "20000000")
-      .set(GlutenConfig.NATIVE_WRITER_ENABLED.key, spark35.toString)
       .set("spark.sql.storeAssignmentPolicy", "legacy")
       // .setCHConfig("use_local_format", true)
       .set("spark.databricks.delta.retentionDurationCheck.enabled", "false")

cpp-ch/local-engine/Storages/SubstraitSource/FileReader.cpp

+2 -1

@@ -302,7 +302,8 @@ std::unique_ptr<NormalFileReader> createNormalFileReader(
             if (column.key() == delta::DeltaDVBitmapConfig::DELTA_ROW_INDEX_FILTER_TYPE)
                 row_index_filter_type = toString(column.value());
         }
-        return delta::DeltaReader::create(file, to_read_header_, output_header_, input_format, row_index_ids_encoded, row_index_filter_type);
+        if (!row_index_ids_encoded.empty() && !row_index_filter_type.empty())
+            return delta::DeltaReader::create(file, to_read_header_, output_header_, input_format, row_index_ids_encoded, row_index_filter_type);
     }
 
     if (file->getFileInfo().has_iceberg())
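
For context: the fix guards DeltaReader creation so a deletion-vector reader is only built when both the encoded row-index ids and the filter type were actually found among the file's column options; otherwise control falls through to the remaining reader paths (Iceberg, plain formats). Below is a minimal, self-contained sketch of that control flow; Reader and createReader are hypothetical stand-ins, not the real local-engine API from the diff.

#include <iostream>
#include <memory>
#include <string>

// Hypothetical stand-in for the reader types involved (NormalFileReader,
// delta::DeltaReader in the actual code). Only the guard logic is mirrored here.
struct Reader { std::string kind; };

std::unique_ptr<Reader> createReader(
    const std::string & row_index_ids_encoded, const std::string & row_index_filter_type)
{
    // After the fix: build the Delta deletion-vector reader only when both
    // pieces of DV metadata are present (non-empty).
    if (!row_index_ids_encoded.empty() && !row_index_filter_type.empty())
        return std::make_unique<Reader>(Reader{"delta-dv"});

    // Otherwise fall through to the generic path. Before the fix, the Delta
    // reader was returned unconditionally, even with empty DV metadata.
    return std::make_unique<Reader>(Reader{"normal"});
}

int main()
{
    std::cout << createReader("", "")->kind << '\n';            // normal
    std::cout << createReader("ids", "bitmap")->kind << '\n';   // delta-dv
}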
