Commit 3e03f44

fix compile errors
1 parent: 77a4369

3 files changed, 6 insertions(+), 6 deletions(-)

shims/spark32/src/main/scala/org/apache/gluten/sql/shims/spark32/Spark32Shims.scala

+2 -2

@@ -197,7 +197,7 @@ class Spark32Shims extends SparkShims {
   override def supportDuplicateReadingTracking: Boolean = false
 
   def getFileStatus(partition: PartitionDirectory): Seq[(FileStatus, Map[String, Any])] =
-    partition.files.map(f => (f, Map.empty))
+    partition.files.map(f => (f, Map.empty[String, Any]))
 
   def isFileSplittable(
       relation: HadoopFsRelation,
@@ -215,7 +215,7 @@ class Spark32Shims extends SparkShims {
       isSplitable: Boolean,
       maxSplitBytes: Long,
       partitionValues: InternalRow,
-      metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile]
+      metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile] = {
     PartitionedFileUtil.splitFiles(
       sparkSession,
       file,
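
The spark33 and spark34 shims below apply the same two changes. As a rough, standalone sketch of why the original lines failed to compile (a hypothetical CompileErrorSketch object with simplified String types standing in for FileStatus/PartitionedFile; not the Gluten sources):

// Standalone sketch of the two compile errors fixed by this commit, with simplified types.
object CompileErrorSketch {

  // 1. A bare Map.empty has its key/value types inferred (typically Map[Nothing, Nothing]
  //    in a call like this), and Map is invariant in its key type, so the result does not
  //    conform to the declared Map[String, Any]. Spelling out the type arguments fixes it.
  def fileStatuses(files: Seq[String]): Seq[(String, Map[String, Any])] =
    files.map(f => (f, Map.empty[String, Any]))

  // 2. A def that stops at its result type, e.g.
  //      def splitFiles(path: String, metadata: Map[String, Any] = Map.empty): Seq[String]
  //    is only an abstract declaration, so the PartitionedFileUtil.splitFiles(...) call that
  //    followed it was never its body. Appending "= {" makes that call the implementation.
  def splitFiles(path: String, metadata: Map[String, Any] = Map.empty): Seq[String] = {
    Seq(path) // placeholder standing in for the real PartitionedFileUtil.splitFiles(...) call
  }
}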

shims/spark33/src/main/scala/org/apache/gluten/sql/shims/spark33/Spark33Shims.scala

+2 -2

@@ -289,7 +289,7 @@ class Spark33Shims extends SparkShims {
   override def supportDuplicateReadingTracking: Boolean = true
 
   def getFileStatus(partition: PartitionDirectory): Seq[(FileStatus, Map[String, Any])] =
-    partition.files.map(f => (f, Map.empty))
+    partition.files.map(f => (f, Map.empty[String, Any]))
 
   def isFileSplittable(
       relation: HadoopFsRelation,
@@ -307,7 +307,7 @@ class Spark33Shims extends SparkShims {
       isSplitable: Boolean,
       maxSplitBytes: Long,
       partitionValues: InternalRow,
-      metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile]
+      metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile] = {
     PartitionedFileUtil.splitFiles(
       sparkSession,
       file,

shims/spark34/src/main/scala/org/apache/gluten/sql/shims/spark34/Spark34Shims.scala

+2 -2

@@ -346,7 +346,7 @@ class Spark34Shims extends SparkShims {
   override def supportDuplicateReadingTracking: Boolean = true
 
   def getFileStatus(partition: PartitionDirectory): Seq[(FileStatus, Map[String, Any])] =
-    partition.files.map(f => (f, Map.empty))
+    partition.files.map(f => (f, Map.empty[String, Any]))
 
   def isFileSplittable(
       relation: HadoopFsRelation,
@@ -386,7 +386,7 @@ class Spark34Shims extends SparkShims {
       isSplitable: Boolean,
       maxSplitBytes: Long,
       partitionValues: InternalRow,
-      metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile]
+      metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile] = {
     PartitionedFileUtil.splitFiles(
       sparkSession,
       file,
