@@ -12,6 +12,7 @@ import org.apache.log4j.Logger
 import scala.collection.JavaConverters._
 import com.typesafe.config.{Config, ConfigFactory}
 import com.vesoft.nebula.algorithm.config.Configs.readConfig
+import com.vesoft.nebula.algorithm.config.Configs.getOrElse
 
 import scala.collection.mutable
 
@@ -129,6 +130,51 @@ object LocalConfigEntry {
   }
 }
 
+
+object HiveConfigEntry {
+  def apply(config: Config): HiveConfigEntry = {
+    // URI of the hive metastore, e.g. thrift://127.0.0.1:9083
+    val hiveMetaStoreUris: String = getOrElse(config, "hive.metaStoreUris", "")
+    val readConfigEntry  = buildReadConfig(config)
+    val writeConfigEntry = buildWriteConfig(config)
+    HiveConfigEntry(hiveMetaStoreUris, readConfigEntry, writeConfigEntry)
+  }
+
+  def buildReadConfig(config: Config): HiveReadConfigEntry = {
+    // the Spark SQL statement that produces the source data
+    val sql: String = getOrElse(config, "hive.read.sql", "")
+    // SQL result column holding the source vertex ID
+    val srcIdCol: String = getOrElse(config, "hive.read.srcId", "")
+    // SQL result column holding the destination vertex ID
+    val dstIdCol: String = getOrElse(config, "hive.read.dstId", "")
+    // SQL result column holding the edge weight
+    val weightCol: String = getOrElse(config, "hive.read.weight", "")
+    HiveReadConfigEntry(sql, srcIdCol, dstIdCol, weightCol)
+  }
+
+  def buildWriteConfig(config: Config): HiveWriteConfigEntry = {
+    // hive table to save the algorithm result to
+    val dbTableName: String = getOrElse(config, "hive.write.dbTableName", "")
+    // Spark DataFrame save mode
+    val saveMode: String = getOrElse(config, "hive.write.saveMode", "")
+    // whether to create the table automatically if it does not exist
+    val autoCreateTable: Boolean = getOrElse(config, "hive.write.autoCreateTable", true)
+    // mapping from algorithm result dataframe columns to hive table columns
+    val resultColumnMapping = mutable.Map[String, String]()
+    val mappingKey          = "hive.write.resultTableColumnMapping"
+    if (config.hasPath(mappingKey)) {
+      val mappingConfig = config.getObject(mappingKey)
+      for (subkey <- mappingConfig.unwrapped().keySet().asScala) {
+        val key   = s"${mappingKey}.${subkey}"
+        val value = config.getString(key)
+        resultColumnMapping += subkey -> value
+      }
+    }
+    HiveWriteConfigEntry(dbTableName, saveMode, autoCreateTable, resultColumnMapping)
+  }
+
+}
+
 /**
   * SparkConfigEntry support key-value pairs for spark session.
   *
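For reference, here is a sketch of the HOCON block these builders would parse. The key names come from the code above; all values are illustrative:

    hive: {
      metaStoreUris: "thrift://127.0.0.1:9083"
      read: {
        sql: "select srcId, dstId, weight from db.edge_table"
        srcId: "srcId"
        dstId: "dstId"
        weight: "weight"
      }
      write: {
        dbTableName: "db.result_table"
        saveMode: "overwrite"
        autoCreateTable: true
        resultTableColumnMapping: {
          _id: "vertex_id"
        }
      }
    }

Each scalar key is read through getOrElse, and the column mapping is consulted only via hasPath, so a missing path falls back to its default (or an empty map) rather than failing at parse time.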
@@ -173,6 +219,34 @@ case class LocalConfigEntry(filePath: String,
   }
 }
 
+case class HiveConfigEntry(hiveMetaStoreUris: String,
+                           hiveReadConfigEntry: HiveReadConfigEntry,
+                           hiveWriteConfigEntry: HiveWriteConfigEntry) {
+  override def toString: String = {
+    s"HiveConfigEntry: {hiveMetaStoreUris: $hiveMetaStoreUris, read: $hiveReadConfigEntry, write: $hiveWriteConfigEntry}"
+  }
+}
+
+case class HiveReadConfigEntry(sql: String,
+                               srcIdCol: String = "srcId",
+                               dstIdCol: String = "dstId",
+                               weightCol: String) {
+  override def toString: String = {
+    s"HiveReadConfigEntry: {sql: $sql, srcIdCol: $srcIdCol, dstIdCol: $dstIdCol, " +
+      s"weightCol: $weightCol}"
+  }
+}
+
+case class HiveWriteConfigEntry(dbTableName: String,
+                                saveMode: String,
+                                autoCreateTable: Boolean,
+                                resultColumnMapping: mutable.Map[String, String]) {
+  override def toString: String = {
+    s"HiveWriteConfigEntry: {dbTableName: $dbTableName, saveMode=$saveMode, " +
+      s"autoCreateTable=$autoCreateTable, resultColumnMapping=$resultColumnMapping}"
+  }
+}
+
 /**
   * NebulaConfigEntry
   * @param readConfigEntry config for nebula-spark-connector reader
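One subtlety in HiveReadConfigEntry: srcIdCol and dstIdCol carry defaults, but the non-defaulted weightCol is declared after them, so positional construction must still supply all four arguments (as buildReadConfig does above), and the defaults are reachable only with named arguments. A hypothetical construction:

    val read = HiveReadConfigEntry(sql = "select src, dst, w from db.t", weightCol = "w")
    // read.srcIdCol == "srcId" and read.dstIdCol == "dstId" via the defaults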
@@ -218,6 +292,7 @@ case class Configs(sparkConfig: SparkConfigEntry,
                    dataSourceSinkEntry: DataSourceSinkEntry,
                    nebulaConfig: NebulaConfigEntry,
                    localConfigEntry: LocalConfigEntry,
+                   hiveConfigEntry: HiveConfigEntry,
                    algorithmConfig: AlgorithmConfigEntry)
 
 object Configs {
@@ -237,10 +312,11 @@ object Configs {
     val dataSourceEntry = DataSourceSinkEntry(config)
     val localConfigEntry = LocalConfigEntry(config)
     val nebulaConfigEntry = NebulaConfigEntry(config)
-    val sparkEntry = SparkConfigEntry(config)
-    val algorithmEntry = AlgorithmConfigEntry(config)
+    val hiveConfigEntry = HiveConfigEntry(config)
+    val sparkEntry      = SparkConfigEntry(config)
+    val algorithmEntry  = AlgorithmConfigEntry(config)
 
-    Configs(sparkEntry, dataSourceEntry, nebulaConfigEntry, localConfigEntry, algorithmEntry)
+    Configs(sparkEntry, dataSourceEntry, nebulaConfigEntry, localConfigEntry, hiveConfigEntry, algorithmEntry)
   }
 
   /**
@@ -277,15 +353,15 @@ object Configs {
   }
 
   /**
-   * Get the value from config by the path. If the path does not exist, return the default value.
-   *
-   * @param config       The config.
-   * @param path         The path of the config.
-   * @param defaultValue The default value for the path.
-   *
-   * @return
-   */
-  private[this] def getOrElse[T](config: Config, path: String, defaultValue: T): T = {
+    * Get the value from config by the path. If the path does not exist, return the default value.
+    *
+    * @param config       The config.
+    * @param path         The path of the config.
+    * @param defaultValue The default value for the path.
+    *
+    * @return
+    */
+  def getOrElse[T](config: Config, path: String, defaultValue: T): T = {
     if (config.hasPath(path)) {
       config.getAnyRef(path).asInstanceOf[T]
     } else {
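Since getOrElse is now public (so the HiveConfigEntry builders above can call it), its behavior is easy to sketch. A minimal, hypothetical example using the already-imported ConfigFactory:

    val config = ConfigFactory.parseString("hive.write.autoCreateTable = false")
    val auto: Boolean = getOrElse(config, "hive.write.autoCreateTable", true) // false: path present
    val uris: String  = getOrElse(config, "hive.metaStoreUris", "")           // "": path absent, default returned

Note that because T is erased, the unchecked asInstanceOf[T] cast cannot fail at this call site; a default whose type does not match the configured value surfaces only later, as a ClassCastException where the value is used.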