Commit 3e03f44 1 parent 77a4369 commit 3e03f44 Copy full SHA for 3e03f44
File tree 3 files changed +6
-6
lines changed
spark32/src/main/scala/org/apache/gluten/sql/shims/spark32
spark33/src/main/scala/org/apache/gluten/sql/shims/spark33
spark34/src/main/scala/org/apache/gluten/sql/shims/spark34
3 files changed +6
-6
lines changed Original file line number Diff line number Diff line change @@ -197,7 +197,7 @@ class Spark32Shims extends SparkShims {
197
197
override def supportDuplicateReadingTracking : Boolean = false
198
198
199
199
def getFileStatus(partition: PartitionDirectory): Seq[(FileStatus, Map[String, Any])] =
200
- partition.files.map(f => (f, Map.empty))
200
+ partition.files.map(f => (f, Map.empty[String, Any]))
201
201
202
202
def isFileSplittable (
203
203
relation : HadoopFsRelation ,
@@ -215,7 +215,7 @@ class Spark32Shims extends SparkShims {
215
215
isSplitable : Boolean ,
216
216
maxSplitBytes : Long ,
217
217
partitionValues : InternalRow ,
218
- metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile]
218
+ metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile] = {
219
219
PartitionedFileUtil.splitFiles(
220
220
sparkSession,
221
221
file,
Original file line number Diff line number Diff line change @@ -289,7 +289,7 @@ class Spark33Shims extends SparkShims {
289
289
override def supportDuplicateReadingTracking : Boolean = true
290
290
291
291
def getFileStatus(partition: PartitionDirectory): Seq[(FileStatus, Map[String, Any])] =
292
- partition.files.map(f => (f, Map.empty))
292
+ partition.files.map(f => (f, Map.empty[String, Any]))
293
293
294
294
def isFileSplittable (
295
295
relation : HadoopFsRelation ,
@@ -307,7 +307,7 @@ class Spark33Shims extends SparkShims {
307
307
isSplitable : Boolean ,
308
308
maxSplitBytes : Long ,
309
309
partitionValues : InternalRow ,
310
- metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile]
310
+ metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile] = {
311
311
PartitionedFileUtil.splitFiles(
312
312
sparkSession,
313
313
file,
Original file line number Diff line number Diff line change @@ -346,7 +346,7 @@ class Spark34Shims extends SparkShims {
346
346
override def supportDuplicateReadingTracking : Boolean = true
347
347
348
348
def getFileStatus(partition: PartitionDirectory): Seq[(FileStatus, Map[String, Any])] =
349
- partition.files.map(f => (f, Map.empty))
349
+ partition.files.map(f => (f, Map.empty[String, Any]))
350
350
351
351
def isFileSplittable (
352
352
relation : HadoopFsRelation ,
@@ -386,7 +386,7 @@ class Spark34Shims extends SparkShims {
386
386
isSplitable : Boolean ,
387
387
maxSplitBytes : Long ,
388
388
partitionValues : InternalRow ,
389
- metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile]
389
+ metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile] = {
390
390
PartitionedFileUtil.splitFiles(
391
391
sparkSession,
392
392
file,
You can’t perform that action at this time.
0 commit comments