Code cleanup.

Lampros Smyrnaios 2024-01-09 17:03:35 +02:00
parent 17282ea8fc
commit eaa070f1e6
2 changed files with 37 additions and 75 deletions

View File

@@ -15,13 +15,13 @@
<!-- The "version" is inherited from the parent module. -->
<dependencies>
<dependency>
<groupId>eu.dnetlib.dhp</groupId>
<artifactId>dhp-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>eu.dnetlib</groupId>
<artifactId>uoa-validator-engine2</artifactId>
@@ -32,33 +32,18 @@
<artifactId>spark-core_${scala.binary.version}</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.spark/spark-sql -->
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
</dependency>
<!--
<dependency>
<groupId>org.apache.thrift</groupId>
<artifactId>libthrift</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.woodstox</groupId>
<artifactId>woodstox-core</artifactId>
</dependency>
-->
<!-- Other dependencies. -->
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
</dependency>
</dependencies>
</project>

View File

@@ -4,14 +4,12 @@ package eu.dnetlib.dhp.continuous_validator;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
@@ -21,7 +19,6 @@ import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.validator2.validation.XMLApplicationProfile;
import eu.dnetlib.validator2.validation.guideline.openaire.*;
import eu.dnetlib.validator2.validation.utils.TestUtils;
import scala.Option;
public class ContinuousValidator {
@@ -30,8 +27,6 @@ public class ContinuousValidator {
private static final org.slf4j.Logger logger = LoggerFactory.getLogger(ContinuousValidator.class);
private static final String parametersFile = "input_continuous_validator_parameters.json";
private static final boolean SHOULD_GET_ARGUMENTS_FROM_FILE = true; // It throws an error for now.
public static void main(String[] args) {
ArgumentApplicationParser parser = null;
@@ -41,65 +36,48 @@ public class ContinuousValidator {
String guidelines = null;
String outputPath = null;
if (SHOULD_GET_ARGUMENTS_FROM_FILE) {
try {
String jsonConfiguration = IOUtils
.toString(
Objects
.requireNonNull(
ContinuousValidator.class
.getResourceAsStream("/eu/dnetlib/dhp/continuous_validator/" + parametersFile)),
StandardCharsets.UTF_8);
parser = new ArgumentApplicationParser(jsonConfiguration);
parser.parseArgument(args);
} catch (Exception e) {
logger.error("Error when parsing the parameters!", e);
return;
}
isSparkSessionManaged = Optional
.ofNullable(parser.get("isSparkSessionManaged")) // This param is not mandatory, so it may be null.
.map(Boolean::valueOf)
.orElse(Boolean.TRUE);
logger.info("isSparkSessionManaged: {}", isSparkSessionManaged);
// This is needed to implement a unit test in which the spark session is created in the context of the
// unit test itself rather than inside the spark application.
logger.info("isSparkSessionManaged: {}", isSparkSessionManaged);
// This is needed to implement a unit test in which the spark session is created in the context of the
// unit test itself rather than inside the spark application"
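// A side note on the flag above: it feeds the runWithSparkSession helper imported
// from dhp-common. A minimal sketch of that pattern, assuming the usual
// runWithSparkSession(SparkConf, Boolean, Consumer<SparkSession>) signature
// (the configuration values below are illustrative, not part of this commit):
//
//     SparkConf conf = new SparkConf();
//     conf.setAppName(ContinuousValidator.class.getSimpleName());
//     conf.setMaster(sparkMaster); // e.g. "local[*]" when arguments come from the file
//
//     // With isSparkSessionManaged == true, the helper creates and closes the
//     // SparkSession itself; a unit test passes "false" and supplies its own session.
//     runWithSparkSession(conf, isSparkSessionManaged, spark -> {
//         // the validation job body runs against the provided session
//     });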
parquet_file_path = parser.get("parquet_file_path");
if (parquet_file_path == null) {
logger.error("The \"parquet_file_path\" was not retrieved from the parameters file: " + parametersFile);
return;
}
guidelines = parser.get("openaire_guidelines");
if (guidelines == null) {
logger
.error("The \"openaire_guidelines\" was not retrieved from the parameters file: " + parametersFile);
return;
}
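// For orientation: further down (outside this hunk) the guidelines value is
// expected to select one of the validator2 profiles imported above. A
// hypothetical sketch of that mapping (the version strings and profile class
// names are illustrative, not taken from this diff):
//
//     XMLApplicationProfile profile;
//     switch (guidelines) {
//         case "4.0":
//             profile = new LiteratureGuidelinesV4Profile(); // hypothetical class name
//             break;
//         case "3.0":
//             profile = new LiteratureGuidelinesV3Profile(); // hypothetical class name
//             break;
//         default:
//             logger.error("Unsupported \"openaire_guidelines\": " + guidelines);
//             return;
//     }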
outputPath = parser.get("outputPath");
if (outputPath == null) {
logger.error("The \"outputPath\" was not retrieved from the parameters file: " + parametersFile);
return;
}
sparkMaster = "local[*]";
} else {
if (args.length != 4) {
String errorMsg = "Wrong number of arguments given! Please run the app like so: java -jar target/dhp-continuous-validation-1.0.0-SNAPSHOT.jar <sparkMaster> <parquetFileFullPath> <guidelines> <outputPath>";
System.err.println(errorMsg);
logger.error(errorMsg);
System.exit(1);
}
sparkMaster = args[0];
logger.info("Will use this Spark master: \"" + sparkMaster + "\".");
parquet_file_path = args[1];
guidelines = args[2];
outputPath = args[3];
if (!outputPath.endsWith("/"))
@@ -156,7 +134,6 @@ public class ContinuousValidator {
.option("compression", "gzip")
.mode(SaveMode.Overwrite)
.json(finalOutputPath + RESULTS_FILE_NAME); // The filename should be the name of the input-file or the
});
}
}
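Since the job writes its results as gzip-compressed JSON with SaveMode.Overwrite, a quick sanity check is to read the output back with the same SparkSession. A minimal sketch, assuming the finalOutputPath and RESULTS_FILE_NAME names used above (Spark reads the gzip-compressed part-files transparently):

	// Read the validation results back and inspect a sample.
	Dataset<Row> results = spark.read().json(finalOutputPath + RESULTS_FILE_NAME);
	results.printSchema();
	results.show(10, false);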