
code formatting

Claudio Atzori 2022-07-01 10:44:08 +02:00
parent 4ec13e2b66
commit 0cb1c70788
10 changed files with 304 additions and 287 deletions
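
All the hunks below apply the same line-wrapping convention: statements that overflow the line limit are broken before a chained method call or after an opening parenthesis, with the continuation indented one extra level. A minimal before/after sketch of that convention, distilled from the first hunk below (the formatter and its configuration are not named in this commit, so treat this as an illustration, not its exact rules):

	// before: one statement on a single long line
	ZenodoModelList zenodoModelList = new Gson().fromJson(getPrevDepositions(String.valueOf(page)), ZenodoModelList.class);

	// after: wrapped before the chained call, continuation indented
	ZenodoModelList zenodoModelList = new Gson()
		.fromJson(getPrevDepositions(String.valueOf(page)), ZenodoModelList.class);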

View File

@@ -255,7 +255,8 @@ public class ZenodoAPIClient implements Serializable {
 	private void setDepositionId(String concept_rec_id, Integer page) throws IOException, MissingConceptDoiException {
-		ZenodoModelList zenodoModelList = new Gson().fromJson(getPrevDepositions(String.valueOf(page)), ZenodoModelList.class);
+		ZenodoModelList zenodoModelList = new Gson()
+			.fromJson(getPrevDepositions(String.valueOf(page)), ZenodoModelList.class);
 		for (ZenodoModel zm : zenodoModelList) {
 			if (zm.getConceptrecid().equals(concept_rec_id)) {
@@ -264,7 +265,8 @@ public class ZenodoAPIClient implements Serializable {
 			}
 		}
 		if (zenodoModelList.size() == 0)
-			throw new MissingConceptDoiException("The concept record id specified was missing in the list of depositions");
+			throw new MissingConceptDoiException(
+				"The concept record id specified was missing in the list of depositions");
 		setDepositionId(concept_rec_id, page + 1);
 	}
@@ -295,7 +297,6 @@ public class ZenodoAPIClient implements Serializable {
 	}
 	private String getBucket(String url) throws IOException {
 		OkHttpClient httpClient = new OkHttpClient.Builder()
 			.connectTimeout(600, TimeUnit.SECONDS)

View File

@@ -142,7 +142,8 @@ class TransformationJobTest extends AbstractVocabularyTest {
 	@Test
 	@DisplayName("Test TransformSparkJobNode.main with oaiOpenaire_datacite (v4)")
-	void transformTestITGv4OAIdatacite(@TempDir final Path testDir) throws Exception {
+	void transformTestITGv4OAIdatacite(@TempDir
+	final Path testDir) throws Exception {
 		try (SparkSession spark = SparkSession.builder().config(sparkConf).getOrCreate()) {
@@ -152,7 +153,9 @@ class TransformationJobTest extends AbstractVocabularyTest {
 				.getFile();
 			final String mdstore_output = testDir.toString() + "/version";
-			mockupTrasformationRule("simpleTRule", "/eu/dnetlib/dhp/transform/scripts/xslt_cleaning_oaiOpenaire_datacite_ExchangeLandingpagePid.xsl");
+			mockupTrasformationRule(
+				"simpleTRule",
+				"/eu/dnetlib/dhp/transform/scripts/xslt_cleaning_oaiOpenaire_datacite_ExchangeLandingpagePid.xsl");
 			final Map<String, String> parameters = Stream.of(new String[][] {
 				{
@@ -203,7 +206,8 @@ class TransformationJobTest extends AbstractVocabularyTest {
 	@Test
 	@DisplayName("Test TransformSparkJobNode.main")
-	void transformTest(@TempDir final Path testDir) throws Exception {
+	void transformTest(@TempDir
+	final Path testDir) throws Exception {
 		try (SparkSession spark = SparkSession.builder().config(sparkConf).getOrCreate()) {

View File

@@ -1,10 +1,14 @@
package eu.dnetlib.dhp.oa.graph.dump.funderresults;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import java.io.Serializable;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FlatMapFunction;
@@ -14,17 +18,20 @@ import org.apache.spark.sql.*;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;
import eu.dnetlib.dhp.schema.dump.oaf.community.Funder;
import eu.dnetlib.dhp.schema.dump.oaf.community.Project;
/**
 * Splits the dumped results by funder and stores them in a folder named after the funder nsp (for all funders
 * except the EC, for which the fundingStream (FP7 or H2020) is also specified).
 */
public class SparkDumpFunderResults implements Serializable {
private static final Logger log = LoggerFactory.getLogger(SparkDumpFunderResults.class);
public static void main(String[] args) throws Exception {
String jsonConfiguration = IOUtils
.toString(
@@ -51,6 +58,7 @@ public class SparkDumpFunderResults implements Serializable {
writeResultProjectList(spark, inputPath, outputPath);
});
}
private static void writeResultProjectList(SparkSession spark, String inputPath, String outputPath) {
Dataset<CommunityResult> result = Utils
.readPath(spark, inputPath + "/publication", CommunityResult.class)
@@ -68,6 +76,7 @@ public class SparkDumpFunderResults implements Serializable {
dumpResults(funder, result, outputPath);
});
}
@NotNull
private static String getFunderName(Project p) {
Optional<Funder> ofunder = Optional.ofNullable(p.getFunder());
@@ -97,6 +106,7 @@ public class SparkDumpFunderResults implements Serializable {
return fName;
}
}
private static void dumpResults(String funder, Dataset<CommunityResult> results, String outputPath) {
results.map((MapFunction<CommunityResult, CommunityResult>) r -> {
if (!Optional.ofNullable(r.getProjects()).isPresent()) {
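
The Javadoc above describes the output layout this class produces: one folder per funder nsp, with the EC further split by funding stream. A minimal, self-contained sketch of that layout; the funder short names and the EC_FP7/EC_H2020 folder names are illustrative assumptions, not taken from this diff:

	import java.nio.file.Path;
	import java.nio.file.Paths;

	public class FunderOutputLayoutSketch {

		// Hypothetical helper: maps a funder (and, for the EC, its funding stream)
		// to the dump folder described in the Javadoc above.
		static Path outputFolder(String outputPath, String funderShortName, String fundingStream) {
			if ("EC".equals(funderShortName)) {
				// the EC is split further by funding stream, e.g. EC_FP7 or EC_H2020
				return Paths.get(outputPath, funderShortName + "_" + fundingStream);
			}
			return Paths.get(outputPath, funderShortName); // e.g. outputPath/NSF
		}

		public static void main(String[] args) {
			System.out.println(outputFolder("/tmp/funderDump", "NSF", null)); // /tmp/funderDump/NSF
			System.out.println(outputFolder("/tmp/funderDump", "EC", "H2020")); // /tmp/funderDump/EC_H2020
		}
	}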

View File

@@ -2,9 +2,11 @@
package eu.dnetlib.dhp.oa.graph.dump.projectssubset;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import java.io.Serializable;
import java.util.Objects;
import java.util.Optional;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
@@ -14,6 +16,7 @@ import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.schema.dump.oaf.graph.Project;

View File

@@ -30,7 +30,6 @@ public class ProjectSubsetTest {
private static final Logger log = LoggerFactory
.getLogger(eu.dnetlib.dhp.oa.graph.dump.projectssubset.ProjectSubsetTest.class);
@BeforeAll
public static void beforeAll() throws IOException {
workingDir = Files