diff --git a/dhp-common/pom.xml b/dhp-common/pom.xml
index 4a13b3459..7b18f0105 100644
--- a/dhp-common/pom.xml
+++ b/dhp-common/pom.xml
@@ -21,6 +21,43 @@
This module contains common utilities meant to be used across the dnet-hadoop submodules
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>net.alchim31.maven</groupId>
+				<artifactId>scala-maven-plugin</artifactId>
+				<version>${net.alchim31.maven.version}</version>
+				<executions>
+					<execution>
+						<id>scala-compile-first</id>
+						<phase>initialize</phase>
+						<goals>
+							<goal>add-source</goal>
+							<goal>compile</goal>
+						</goals>
+					</execution>
+					<execution>
+						<id>scala-test-compile</id>
+						<phase>process-test-resources</phase>
+						<goals>
+							<goal>testCompile</goal>
+						</goals>
+					</execution>
+					<execution>
+						<id>scala-doc</id>
+						<phase>process-resources</phase>
+						<goals>
+							<goal>doc</goal>
+						</goals>
+					</execution>
+				</executions>
+				<configuration>
+					<scalaVersion>${scala.version}</scalaVersion>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/application/AbstractScalaApplication.scala b/dhp-common/src/main/java/eu/dnetlib/dhp/application/AbstractScalaApplication.scala
deleted file mode 100644
index 44dad93eb..000000000
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/application/AbstractScalaApplication.scala
+++ /dev/null
@@ -1,37 +0,0 @@
-package eu.dnetlib.dhp.application
-
-import org.apache.spark.SparkConf
-import org.apache.spark.sql.SparkSession
-import org.slf4j.Logger
-
-abstract class AbstractScalaApplication (val propertyPath:String, val args:Array[String], log:Logger) extends SparkScalaApplication {
-
- var parser: ArgumentApplicationParser = null
-
- var spark:SparkSession = null
-
-
- def initialize():SparkScalaApplication = {
- parser = parseArguments(args)
- spark = createSparkSession()
- this
- }
-
- /**
- * Utility for creating a spark session starting from parser
- *
- * @return a spark Session
- */
- private def createSparkSession():SparkSession = {
- require(parser!= null)
-
- val conf:SparkConf = new SparkConf()
- val master = parser.get("master")
- log.info(s"Creating Spark session: Master: $master")
- SparkSession.builder().config(conf)
- .appName(getClass.getSimpleName)
- .master(master)
- .getOrCreate()
- }
-
-}
\ No newline at end of file
diff --git a/dhp-common/src/main/java/eu/dnetlib/dhp/application/SparkScalaApplication.scala b/dhp-common/src/main/java/eu/dnetlib/dhp/application/SparkScalaApplication.scala
index 247bacac0..6541746b2 100644
--- a/dhp-common/src/main/java/eu/dnetlib/dhp/application/SparkScalaApplication.scala
+++ b/dhp-common/src/main/java/eu/dnetlib/dhp/application/SparkScalaApplication.scala
@@ -33,3 +33,40 @@ trait SparkScalaApplication {
*/
def run(): Unit
}
+
+
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.SparkSession
+import org.slf4j.Logger
+
+abstract class AbstractScalaApplication (val propertyPath:String, val args:Array[String], log:Logger) extends SparkScalaApplication {
+
+ var parser: ArgumentApplicationParser = null
+
+ var spark:SparkSession = null
+
+
+ def initialize():SparkScalaApplication = {
+ parser = parseArguments(args)
+ spark = createSparkSession()
+ this
+ }
+
+ /**
+ * Utility for creating a spark session starting from parser
+ *
+ * @return a spark Session
+ */
+ private def createSparkSession():SparkSession = {
+ require(parser!= null)
+
+ val conf:SparkConf = new SparkConf()
+ val master = parser.get("master")
+ log.info(s"Creating Spark session: Master: $master")
+ SparkSession.builder().config(conf)
+ .appName(getClass.getSimpleName)
+ .master(master)
+ .getOrCreate()
+ }
+
+}
\ No newline at end of file