Skip to content

Commit

Permalink
fix compile errors
Browse files Browse the repository at this point in the history
  • Loading branch information
zzcclp committed Mar 10, 2025
1 parent e809800 commit 50f2c0f
Show file tree
Hide file tree
Showing 3 changed files with 6 additions and 6 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,7 @@ class Spark32Shims extends SparkShims {
// Spark 3.2 shim: duplicate-reading tracking is disabled for this Spark version.
override def supportDuplicateReadingTracking: Boolean = false

/**
 * Pairs every file of the given partition with an (initially empty) metadata map.
 *
 * The value-type argument on `Map.empty` is spelled out explicitly: without it
 * the compiler fails to infer a `Map` conforming to the declared
 * `Map[String, Any]` element type (the compile error this commit fixes).
 * The original, untyped `Map.empty` line is removed.
 */
def getFileStatus(partition: PartitionDirectory): Seq[(FileStatus, Map[String, Any])] =
  partition.files.map(f => (f, Map.empty[String, Any]))

def isFileSplittable(
relation: HadoopFsRelation,
Expand All @@ -215,7 +215,7 @@ class Spark32Shims extends SparkShims {
isSplitable: Boolean,
maxSplitBytes: Long,
partitionValues: InternalRow,
metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile]
metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile] = {
PartitionedFileUtil.splitFiles(
sparkSession,
file,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -289,7 +289,7 @@ class Spark33Shims extends SparkShims {
// Spark 3.3 shim: duplicate-reading tracking is enabled for this Spark version.
override def supportDuplicateReadingTracking: Boolean = true

/**
 * Pairs every file of the given partition with an (initially empty) metadata map.
 *
 * The value-type argument on `Map.empty` is spelled out explicitly: without it
 * the compiler fails to infer a `Map` conforming to the declared
 * `Map[String, Any]` element type (the compile error this commit fixes).
 * The original, untyped `Map.empty` line is removed.
 */
def getFileStatus(partition: PartitionDirectory): Seq[(FileStatus, Map[String, Any])] =
  partition.files.map(f => (f, Map.empty[String, Any]))

def isFileSplittable(
relation: HadoopFsRelation,
Expand All @@ -307,7 +307,7 @@ class Spark33Shims extends SparkShims {
isSplitable: Boolean,
maxSplitBytes: Long,
partitionValues: InternalRow,
metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile]
metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile] = {
PartitionedFileUtil.splitFiles(
sparkSession,
file,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -346,7 +346,7 @@ class Spark34Shims extends SparkShims {
// Spark 3.4 shim: duplicate-reading tracking is enabled for this Spark version.
override def supportDuplicateReadingTracking: Boolean = true

/**
 * Pairs every file of the given partition with an (initially empty) metadata map.
 *
 * The value-type argument on `Map.empty` is spelled out explicitly: without it
 * the compiler fails to infer a `Map` conforming to the declared
 * `Map[String, Any]` element type (the compile error this commit fixes).
 * The original, untyped `Map.empty` line is removed.
 */
def getFileStatus(partition: PartitionDirectory): Seq[(FileStatus, Map[String, Any])] =
  partition.files.map(f => (f, Map.empty[String, Any]))

def isFileSplittable(
relation: HadoopFsRelation,
Expand Down Expand Up @@ -386,7 +386,7 @@ class Spark34Shims extends SparkShims {
isSplitable: Boolean,
maxSplitBytes: Long,
partitionValues: InternalRow,
metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile]
metadata: Map[String, Any] = Map.empty): Seq[PartitionedFile] = {
PartitionedFileUtil.splitFiles(
sparkSession,
file,
Expand Down

0 comments on commit 50f2c0f

Please sign in to comment.