[orcidenrichment] Fix imports and formatting
parent 33eb0f60e6
commit 2639fb5da2
@@ -1,3 +1,4 @@
 package eu.dnetlib.dhp.common.enrichment;

 public class Constants {

@@ -1,6 +1,8 @@

 package eu.dnetlib.dhp;

+import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;
+
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Optional;

@@ -21,8 +23,6 @@ import eu.dnetlib.dhp.schema.oaf.DataInfo;
 import eu.dnetlib.dhp.schema.oaf.Qualifier;
 import eu.dnetlib.dhp.schema.oaf.Relation;

-import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;
-
 public class PropagationConstant {

 	private PropagationConstant() {

@@ -48,7 +48,7 @@ public class PropagationConstant {

 	public static final String INSTITUTIONAL_REPO_TYPE = "institutional";

-	//public static final String PROPAGATION_DATA_INFO_TYPE = "propagation";
+	// public static final String PROPAGATION_DATA_INFO_TYPE = "propagation";

 	public static final String TRUE = "true";

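Taken together, the hunks above stop PropagationConstant from owning PROPAGATION_DATA_INFO_TYPE and resolve it against the shared enrichment Constants class instead. For reference, a minimal sketch of what that class is assumed to declare; only the package, the class name and the old "propagation" value are visible in this commit, the field declaration and private constructor are assumptions:

	package eu.dnetlib.dhp.common.enrichment;

	public class Constants {

		// assumed declaration; the value mirrors the commented-out field left behind in PropagationConstant
		public static final String PROPAGATION_DATA_INFO_TYPE = "propagation";

		private Constants() {
		}
	}
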
@@ -3,6 +3,7 @@ package eu.dnetlib.dhp.projecttoresult;

 import static eu.dnetlib.dhp.PropagationConstant.*;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;

 import java.util.ArrayList;
 import java.util.List;

@@ -3,6 +3,7 @@ package eu.dnetlib.dhp.resulttocommunityfromorganization;

 import static eu.dnetlib.dhp.PropagationConstant.*;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;

 import java.util.ArrayList;
 import java.util.Arrays;

@@ -5,6 +5,7 @@ import static eu.dnetlib.dhp.PropagationConstant.*;
 import static eu.dnetlib.dhp.PropagationConstant.PROPAGATION_RESULT_COMMUNITY_ORGANIZATION_CLASS_NAME;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;

 import java.io.Serializable;
 import java.util.ArrayList;

@@ -3,6 +3,7 @@ package eu.dnetlib.dhp.resulttocommunityfromsemrel;

 import static eu.dnetlib.dhp.PropagationConstant.*;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
+import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;

 import java.util.*;
 import java.util.stream.Collectors;

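The four hunks above only add the static import, so call sites inside these propagation jobs keep referring to PROPAGATION_DATA_INFO_TYPE by its simple name. A minimal sketch of the usage pattern such an import supports, assuming the constant tags the DataInfo of propagated entities; the class and helper name here are illustrative, and the setter is assumed to mirror the getInferenceprovenance() getter exercised in SparkJobTest below:

	import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;

	import eu.dnetlib.dhp.schema.oaf.DataInfo;

	public class PropagationDataInfoExample {

		// hypothetical helper: marks a DataInfo as produced by propagation
		public static DataInfo markAsPropagated(DataInfo dataInfo) {
			dataInfo.setInferenceprovenance(PROPAGATION_DATA_INFO_TYPE);
			return dataInfo;
		}
	}
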
@@ -25,6 +25,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;

 import eu.dnetlib.dhp.KeyValueSet;
 import eu.dnetlib.dhp.PropagationConstant;
+import eu.dnetlib.dhp.common.enrichment.Constants;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Relation;

@@ -145,7 +146,7 @@ public class SparkJobTest {
 			.foreach(
 				r -> Assertions
 					.assertEquals(
-						PropagationConstant.PROPAGATION_DATA_INFO_TYPE, r.getDataInfo().getInferenceprovenance()));
+						Constants.PROPAGATION_DATA_INFO_TYPE, r.getDataInfo().getInferenceprovenance()));
 		result
 			.foreach(
 				r -> Assertions

@@ -428,7 +429,7 @@ public class SparkJobTest {
 			.foreach(
 				r -> Assertions
 					.assertEquals(
-						PropagationConstant.PROPAGATION_DATA_INFO_TYPE, r.getDataInfo().getInferenceprovenance()));
+						Constants.PROPAGATION_DATA_INFO_TYPE, r.getDataInfo().getInferenceprovenance()));
 		project
 			.foreach(
 				r -> Assertions

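In both test hunks the expected value is unchanged; only its owner moves from PropagationConstant to the shared Constants class. Pulled out of the Spark foreach, the per-relation check amounts to the following sketch; the wrapper class and method name are illustrative, the assertion itself is the one in the diff:

	import static org.junit.jupiter.api.Assertions.assertEquals;

	import eu.dnetlib.dhp.common.enrichment.Constants;
	import eu.dnetlib.dhp.schema.oaf.Relation;

	public class PropagationProvenanceCheck {

		// every relation produced by the propagation jobs must carry the shared provenance marker
		static void assertPropagated(Relation r) {
			assertEquals(Constants.PROPAGATION_DATA_INFO_TYPE, r.getDataInfo().getInferenceprovenance());
		}
	}
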
@@ -11,7 +11,12 @@ import scala.collection.JavaConverters._

 class SparkEnrichGraphWithOrcidAuthors(propertyPath: String, args: Array[String], log: Logger)
     extends SparkEnrichWithOrcidAuthors(propertyPath, args, log: Logger) {

-  override def createTemporaryData(spark:SparkSession, graphPath: String, orcidPath: String, targetPath: String): Unit = {
+  override def createTemporaryData(
+    spark: SparkSession,
+    graphPath: String,
+    orcidPath: String,
+    targetPath: String
+  ): Unit = {
     val orcidAuthors =
       spark.read.load(s"$orcidPath/Authors").select("orcid", "familyName", "givenName", "creditName", "otherNames")

@@ -82,7 +87,7 @@ class SparkEnrichGraphWithOrcidAuthors(propertyPath: String, args: Array[String], log: Logger)
   }
 }

 object SparkEnrichGraphWithOrcidAuthors {

   val log: Logger = LoggerFactory.getLogger(SparkEnrichGraphWithOrcidAuthors.getClass)

@@ -91,5 +96,4 @@ class SparkEnrichGraphWithOrcidAuthors(propertyPath: String, args: Array[String], log: Logger)
       .initialize()
       .run()
   }
 }
-