[orcidenrichment] Fix imports and formatting

Giambattista Bloisi 2024-11-21 16:21:03 +01:00
parent 33eb0f60e6
commit 2639fb5da2
8 changed files with 25 additions and 15 deletions

View File

@@ -1,3 +1,4 @@
 package eu.dnetlib.dhp.common.enrichment;
 public class Constants {
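The body of the new Constants class does not appear in this hunk; a minimal sketch of what the rest of the commit implies it contains, with the constant name and its "propagation" value taken from the commented-out definition left in PropagationConstant below:

package eu.dnetlib.dhp.common.enrichment;

public class Constants {

    // assumed content: the shared provenance marker the other files now import;
    // the "propagation" value comes from the commented-out line in PropagationConstant
    public static final String PROPAGATION_DATA_INFO_TYPE = "propagation";
}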

View File

@@ -1,6 +1,8 @@
 package eu.dnetlib.dhp;
+import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Optional;
@@ -21,8 +23,6 @@ import eu.dnetlib.dhp.schema.oaf.DataInfo;
 import eu.dnetlib.dhp.schema.oaf.Qualifier;
 import eu.dnetlib.dhp.schema.oaf.Relation;
-import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;
 public class PropagationConstant {
 private PropagationConstant() {
@@ -48,7 +48,7 @@ public class PropagationConstant {
 public static final String INSTITUTIONAL_REPO_TYPE = "institutional";
-//public static final String PROPAGATION_DATA_INFO_TYPE = "propagation";
+// public static final String PROPAGATION_DATA_INFO_TYPE = "propagation";
 public static final String TRUE = "true";
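For orientation, a hedged illustration of the call-site pattern the relocated static import supports; Relation, getDataInfo() and getInferenceprovenance() appear in the test hunk further down, while the helper class and method here are hypothetical:

import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;

import eu.dnetlib.dhp.schema.oaf.Relation;

public class PropagationProvenanceCheck { // hypothetical helper, not part of the commit

    // true when a relation was produced by a propagation job, i.e. its data info
    // carries the shared "propagation" marker as inference provenance
    public static boolean isPropagated(Relation r) {
        return r.getDataInfo() != null
            && PROPAGATION_DATA_INFO_TYPE.equals(r.getDataInfo().getInferenceprovenance());
    }
}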

View File

@@ -3,6 +3,7 @@ package eu.dnetlib.dhp.projecttoresult;
 import static eu.dnetlib.dhp.PropagationConstant.*;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;
 import java.util.ArrayList;
 import java.util.List;

View File

@@ -3,6 +3,7 @@ package eu.dnetlib.dhp.resulttocommunityfromorganization;
 import static eu.dnetlib.dhp.PropagationConstant.*;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;
 import java.util.ArrayList;
 import java.util.Arrays;

View File

@@ -5,6 +5,7 @@ import static eu.dnetlib.dhp.PropagationConstant.*;
 import static eu.dnetlib.dhp.PropagationConstant.PROPAGATION_RESULT_COMMUNITY_ORGANIZATION_CLASS_NAME;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;
 import java.io.Serializable;
 import java.util.ArrayList;

View File

@@ -3,6 +3,7 @@ package eu.dnetlib.dhp.resulttocommunityfromsemrel;
 import static eu.dnetlib.dhp.PropagationConstant.*;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;
+import static eu.dnetlib.dhp.common.enrichment.Constants.PROPAGATION_DATA_INFO_TYPE;
 import java.util.*;
 import java.util.stream.Collectors;

View File

@@ -25,6 +25,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.KeyValueSet;
 import eu.dnetlib.dhp.PropagationConstant;
+import eu.dnetlib.dhp.common.enrichment.Constants;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Relation;
@@ -145,7 +146,7 @@ public class SparkJobTest {
 .foreach(
 r -> Assertions
 .assertEquals(
-PropagationConstant.PROPAGATION_DATA_INFO_TYPE, r.getDataInfo().getInferenceprovenance()));
+Constants.PROPAGATION_DATA_INFO_TYPE, r.getDataInfo().getInferenceprovenance()));
 result
 .foreach(
 r -> Assertions
@@ -428,7 +429,7 @@ public class SparkJobTest {
 .foreach(
 r -> Assertions
 .assertEquals(
-PropagationConstant.PROPAGATION_DATA_INFO_TYPE, r.getDataInfo().getInferenceprovenance()));
+Constants.PROPAGATION_DATA_INFO_TYPE, r.getDataInfo().getInferenceprovenance()));
 project
 .foreach(
 r -> Assertions

View File

@@ -11,7 +11,12 @@ import scala.collection.JavaConverters._
 class SparkEnrichGraphWithOrcidAuthors(propertyPath: String, args: Array[String], log: Logger)
 extends SparkEnrichWithOrcidAuthors(propertyPath, args, log: Logger) {
-override def createTemporaryData(spark:SparkSession, graphPath: String, orcidPath: String, targetPath: String): Unit = {
+override def createTemporaryData(
+spark: SparkSession,
+graphPath: String,
+orcidPath: String,
+targetPath: String
+): Unit = {
 val orcidAuthors =
 spark.read.load(s"$orcidPath/Authors").select("orcid", "familyName", "givenName", "creditName", "otherNames")
@@ -82,7 +87,7 @@ class SparkEnrichGraphWithOrcidAuthors(propertyPath: String, args: Array[String]
 }
 }
-object SparkEnrichGraphWithOrcidAuthors {
+object SparkEnrichGraphWithOrcidAuthors {
 val log: Logger = LoggerFactory.getLogger(SparkEnrichGraphWithOrcidAuthors.getClass)
@@ -91,5 +96,4 @@ class SparkEnrichGraphWithOrcidAuthors(propertyPath: String, args: Array[String]
 .initialize()
 .run()
 }
-}
 }
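
Only the tail of the entry point is visible in the last hunk; a sketch of how the companion object plausibly reads after the fix, assuming a conventional main() that passes a parameters file, the CLI args and the shared logger to the job (the properties path is a placeholder, not taken from the diff):

import org.slf4j.{Logger, LoggerFactory}

object SparkEnrichGraphWithOrcidAuthors {

  val log: Logger = LoggerFactory.getLogger(SparkEnrichGraphWithOrcidAuthors.getClass)

  def main(args: Array[String]): Unit = {
    // placeholder path, assumption: the real parameters resource is not shown in the diff
    new SparkEnrichGraphWithOrcidAuthors("/path/to/enrich_graph_orcid_parameters.json", args, log)
      .initialize()
      .run()
  }
}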