Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[GLUTEN-7690][CORE] GlutenConfig should use SQLConf provided by SparkSession first #8962

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@ import org.apache.gluten.config.GlutenConfig

import org.apache.spark.SparkConf
import org.apache.spark.network.util.ByteUnit
import org.apache.spark.sql.internal.SQLConf

object CHConfig {
private[clickhouse] val BACKEND_NAME: String = "ch"
Expand Down Expand Up @@ -57,7 +56,7 @@ object CHConfig {
def startWithSettingsPrefix(key: String): Boolean = key.startsWith(RUNTIME_SETTINGS)
def removeSettingsPrefix(key: String): String = key.substring(RUNTIME_SETTINGS.length + 1)

def get: CHConfig = new CHConfig(SQLConf.get)
def get: CHConfig = new CHConfig()

import GlutenConfig._

Expand Down Expand Up @@ -106,7 +105,7 @@ object CHConfig {
.createWithDefault(false)
}

class CHConfig(conf: SQLConf) extends GlutenConfig(conf) {
class CHConfig extends GlutenConfig {
import CHConfig._

def enableOnePipelineMergeTreeWrite: Boolean =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,11 @@ package org.apache.gluten.config
import org.apache.gluten.config.GlutenConfig.{buildConf, buildStaticConf, COLUMNAR_MAX_BATCH_SIZE}

import org.apache.spark.network.util.ByteUnit
import org.apache.spark.sql.internal.SQLConf

import java.util.Locale
import java.util.concurrent.TimeUnit

class VeloxConfig(conf: SQLConf) extends GlutenConfig(conf) {
class VeloxConfig extends GlutenConfig {
import VeloxConfig._

def veloxColumnarWindowType: String = getConf(COLUMNAR_VELOX_WINDOW_TYPE)
Expand Down Expand Up @@ -64,9 +63,7 @@ class VeloxConfig(conf: SQLConf) extends GlutenConfig(conf) {

object VeloxConfig {

def get: VeloxConfig = {
new VeloxConfig(SQLConf.get)
}
def get: VeloxConfig = new VeloxConfig()

val COLUMNAR_VELOX_WINDOW_TYPE =
buildConf("spark.gluten.sql.columnar.backend.velox.window.type")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,6 @@ object ColumnarRuleApplier {
val session: SparkSession,
val caller: CallerInfo,
val outputsColumnar: Boolean) {
val glutenConf: GlutenConfig = {
new GlutenConfig(session.sessionState.conf)
}
val glutenConf: GlutenConfig = new GlutenConfig()
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,8 @@ class GlutenInjector private[injector] (control: InjectorControl) {
}

private def applier(session: SparkSession): ColumnarRuleApplier = {
val conf = new GlutenConfig(session.sessionState.conf)
if (conf.enableRas) {
val glutenConf = new GlutenConfig(session)
if (glutenConf.enableRas) {
return ras.createApplier(session)
}
legacy.createApplier(session)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -696,7 +696,7 @@ object ExpressionConverter extends SQLConfHelper with Logging {
)
case t: TransformKeys =>
// default is `EXCEPTION`
val mapKeyDedupPolicy = SQLConf.get.getConf(SQLConf.MAP_KEY_DEDUP_POLICY)
val mapKeyDedupPolicy = conf.getConf(SQLConf.MAP_KEY_DEDUP_POLICY)
if (mapKeyDedupPolicy == SQLConf.MapKeyDedupPolicy.LAST_WIN.toString) {
// TODO: Remove after fix ready for
// https://github.com/facebookincubator/velox/issues/10219
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ package org.apache.gluten.config

import org.apache.spark.internal.Logging
import org.apache.spark.network.util.ByteUnit
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.{GlutenConfigUtil, SQLConf, SQLConfProvider}

import com.google.common.collect.ImmutableList
Expand All @@ -33,9 +34,13 @@ case class GlutenNumaBindingInfo(
totalCoreRange: Array[String] = null,
numCoresPerExecutor: Int = -1) {}

class GlutenConfig(conf: SQLConf) extends Logging {
class GlutenConfig(sessionOpt: Option[SparkSession] = None) extends Logging {
import GlutenConfig._

def this(spark: SparkSession) = this(Some(spark))

def conf: SQLConf = sessionOpt.map(_.sessionState.conf).getOrElse(SQLConf.get)

private lazy val configProvider = new SQLConfProvider(conf)

def getConf[T](entry: ConfigEntry[T]): T = {
Expand Down Expand Up @@ -436,9 +441,7 @@ object GlutenConfig {
val SPARK_SHUFFLE_SPILL_COMPRESS = "spark.shuffle.spill.compress"
val SPARK_SHUFFLE_SPILL_COMPRESS_DEFAULT: Boolean = true

def get: GlutenConfig = {
new GlutenConfig(SQLConf.get)
}
def get: GlutenConfig = new GlutenConfig()

def prefixOf(backendName: String): String = {
GLUTEN_CONFIG_PREFIX + backendName
Expand Down
Loading