DHP-Explorer/src/main/java/com/sandro/app/SparkApp.scala

package com.sandro.app

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.slf4j.Logger

import scala.collection.mutable

trait SparkApp extends Serializable {

  /** Utility to parse the arguments, using the property JSON on the
    * classpath identified by the variable propertyPath.
    *
    * @param args the list of arguments
    */
  def parseArguments(args: Array[String]): mutable.Map[String, String] = {
    SparkUtility.parseArguments(args)
  }

  /** Every Spark application runs this method, where the whole logic of
    * the Spark job is defined.
    */
  def run(): Unit
}
abstract class AbstractScalaApplication(val args: Array[String], log: Logger) extends SparkApp {

  var argumentMap: mutable.Map[String, String] = _

  var spark: SparkSession = _

  /** Parses the arguments and creates the Spark session, then returns this
    * application ready to run.
    */
  def initialize(): SparkApp = {
    argumentMap = parseArguments(args)
    spark = createSparkSession()
    spark.sparkContext.setLogLevel("WARN")
    this
  }

  /** Utility that creates a Spark session starting from the parsed arguments.
    *
    * @return a SparkSession
    */
  private def createSparkSession(): SparkSession = {
    require(argumentMap != null)
    val conf: SparkConf = new SparkConf()
    val master = argumentMap("master")
    log.info(s"Creating Spark session: Master: $master")
    SparkSession
      .builder()
      .config(conf)
      .appName(getClass.getSimpleName)
      .master(master)
      .getOrCreate()
  }
}
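
/* Illustrative usage sketch (not part of the original file): a hypothetical
 * concrete application built on top of AbstractScalaApplication. The name
 * ExampleApp and the "inputPath" argument key are assumptions made for the
 * example; only the initialize()/run() contract and the "master" argument
 * come from the code above.
 */
object ExampleApp {

  def main(args: Array[String]): Unit = {
    val log: Logger = org.slf4j.LoggerFactory.getLogger(getClass)
    new ExampleApp(args, log).initialize().run()
  }
}

class ExampleApp(args: Array[String], log: Logger) extends AbstractScalaApplication(args, log) {

  /** The whole logic of the job lives here; `spark` and `argumentMap` are
    * populated by initialize() before run() is invoked.
    */
  override def run(): Unit = {
    val inputPath = argumentMap("inputPath") // hypothetical argument key
    log.info(s"Counting lines of $inputPath")
    println(spark.read.textFile(inputPath).count())
  }
}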