Merge branch 'master' of code-repo.d4science.org:D-Net/dnet-hadoop

Michele Artini 2020-09-08 16:39:54 +02:00
commit 9cfc124ac5
14 changed files with 123 additions and 120 deletions

View File

@@ -19,7 +19,7 @@
 <setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
 <setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_switch_statement" value="common_lines"/>
-<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="false"/>
 <setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
 <setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
 <setting id="org.eclipse.jdt.core.formatter.parentheses_positions_in_enum_constant_declaration" value="common_lines"/>

View File

@@ -1,3 +1,4 @@
 package eu.dnetlib.dhp.common.api;
+
 public class MissingConceptDoiException extends Throwable {

View File

@@ -12,14 +12,12 @@ import okhttp3.*;
 public class ZenodoAPIClient implements Serializable {
 
     String urlString;
     String bucket;
-
     String deposition_id;
-
     String access_token;
 
     public static final MediaType MEDIA_TYPE_JSON = MediaType.parse("application/json; charset=utf-8");
 
     private static final MediaType MEDIA_TYPE_ZIP = MediaType.parse("application/zip");
@@ -40,7 +38,9 @@ public class ZenodoAPIClient implements Serializable {
         this.bucket = bucket;
     }
 
-    public void setDeposition_id(String deposition_id){this.deposition_id = deposition_id;}
+    public void setDeposition_id(String deposition_id) {
+        this.deposition_id = deposition_id;
+    }
 
     public ZenodoAPIClient(String urlString, String access_token) throws IOException {
@@ -205,8 +205,8 @@ public class ZenodoAPIClient implements Serializable {
         ZenodoModelList zenodoModelList = new Gson().fromJson(getPrevDepositions(), ZenodoModelList.class);
 
-        for(ZenodoModel zm : zenodoModelList){
-            if (zm.getConceptrecid().equals(concept_rec_id)){
+        for (ZenodoModel zm : zenodoModelList) {
+            if (zm.getConceptrecid().equals(concept_rec_id)) {
                 deposition_id = zm.getId();
                 return;
             }
@@ -255,12 +255,10 @@ public class ZenodoAPIClient implements Serializable {
             // Get response body
             ZenodoModel zenodoModel = new Gson().fromJson(response.body().string(), ZenodoModel.class);
-
             return zenodoModel.getLinks().getBucket();
-
         }
     }
 }
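The ZenodoAPIClient edits above are purely stylistic: the one-line setter is expanded, brace spacing is normalized, and stray blank lines are dropped; the deposition lookup by concept_rec_id is unchanged. A rough usage sketch limited to the members attested in this diff (the endpoint URL and deposition id below are hypothetical placeholders):

    import eu.dnetlib.dhp.common.api.ZenodoAPIClient;

    public class ZenodoClientExample {
        public static void main(String[] args) throws Exception {
            // Constructor as declared in the hunk above.
            ZenodoAPIClient client = new ZenodoAPIClient(
                "https://zenodo.org/api/deposit/depositions", // hypothetical endpoint
                System.getenv("ZENODO_ACCESS_TOKEN"));

            // Point the client at an existing deposition, using the
            // setter that this commit reformats.
            client.setDeposition_id("1234567"); // hypothetical deposition id
        }
    }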

View File

@@ -1,3 +1,4 @@
 package eu.dnetlib.dhp.common.api.zenodo;
+
 import java.util.ArrayList;

View File

@@ -1,3 +1,4 @@
 package eu.dnetlib.dhp.schema.dump.oaf;
+
 import java.io.Serializable;

View File

@@ -1,3 +1,4 @@
 package eu.dnetlib.dhp.schema.dump.oaf;
+
 import java.io.Serializable;

View File

@@ -10,7 +10,6 @@ import eu.dnetlib.dhp.schema.dump.oaf.KeyValue;
 import eu.dnetlib.dhp.schema.dump.oaf.Qualifier;
 import eu.dnetlib.dhp.schema.dump.oaf.community.Project;
-
 /**
  * To represent the generic organizaiton. It has the following parameters:
  * - private String legalshortname to store the legalshortname of the organizaiton
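The Javadoc here documents the dump model's organization entity field by field, though only the legalshortname line is visible in this hunk. A sketch of the shape it describes (class name and accessors are assumptions; only the field name is attested):

    package eu.dnetlib.dhp.schema.dump.oaf.graph;

    import java.io.Serializable;

    // Sketch of the organization entity described by the Javadoc above.
    public class Organization implements Serializable {
        private String legalshortname; // the organization's short legal name

        public String getLegalshortname() {
            return legalshortname;
        }

        public void setLegalshortname(String legalshortname) {
            this.legalshortname = legalshortname;
        }
    }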

View File

@@ -368,7 +368,7 @@ public class ResultMapper implements Serializable {
             Instance instance = new Instance();
 
-            if(!graph){
+            if (!graph) {
                 instance
                     .setCollectedfrom(
                         KeyValue
@@ -393,7 +393,6 @@ public class ResultMapper implements Serializable {
                 }
             }
-
             Optional
                 .ofNullable(i.getLicense())
                 .ifPresent(value -> instance.setLicense(value.getValue()));
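The Optional.ofNullable(...).ifPresent(...) chain kept here is ResultMapper's standard guard against null fields on the source model. A self-contained sketch of the same pattern (the Field and Instance types below are simplified stand-ins, not the repository's classes):

    import java.util.Optional;

    public class OptionalMappingExample {

        static class Field<T> {
            private final T value;
            Field(T value) { this.value = value; }
            T getValue() { return value; }
        }

        static class Instance {
            private String license;
            void setLicense(String license) { this.license = license; }
        }

        public static void main(String[] args) {
            Field<String> license = null; // the source field may legitimately be absent
            Instance instance = new Instance();

            // Copy the value only when the source field is non-null,
            // mirroring the i.getLicense() handling above.
            Optional
                .ofNullable(license)
                .ifPresent(value -> instance.setLicense(value.getValue()));
        }
    }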

View File

@@ -3,11 +3,13 @@ package eu.dnetlib.dhp.oa.graph.dump;
 import java.io.Serializable;
 import java.util.Optional;
+
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
+
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.common.api.MissingConceptDoiException;
 import eu.dnetlib.dhp.common.api.ZenodoAPIClient;

View File

@@ -8,8 +8,6 @@ import java.io.StringReader;
 import java.util.*;
 import java.util.stream.Collectors;
 
-import eu.dnetlib.dhp.schema.dump.oaf.graph.Funder;
-import eu.dnetlib.dhp.schema.dump.oaf.graph.Project;
 import org.apache.spark.SparkConf;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SaveMode;
@@ -24,6 +22,8 @@ import eu.dnetlib.dhp.oa.graph.dump.Utils;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.dump.oaf.*;
 import eu.dnetlib.dhp.schema.dump.oaf.graph.*;
+import eu.dnetlib.dhp.schema.dump.oaf.graph.Funder;
+import eu.dnetlib.dhp.schema.dump.oaf.graph.Project;
 import eu.dnetlib.dhp.schema.oaf.Field;
 import eu.dnetlib.dhp.schema.oaf.Journal;
 import eu.dnetlib.dhp.schema.oaf.OafEntity;

View File

@@ -1,5 +1,4 @@
 package eu.dnetlib.dhp.oa.graph.dump.graph;
 
-
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
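The surviving static import pulls in runWithSparkSession, the wrapper this codebase uses to run a job inside a managed SparkSession. A rough usage sketch (the helper's exact signature is an assumption; only the import is attested in this diff):

    import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

    import org.apache.spark.SparkConf;

    public class RunWithSparkSessionExample {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf()
                .setAppName("dump-example") // hypothetical app name
                .setMaster("local[*]");     // hypothetical local master

            Boolean isSparkSessionManaged = Boolean.TRUE;

            // The helper opens the session, runs the body, and closes the
            // session afterwards when it is managed by the framework.
            runWithSparkSession(
                conf,
                isSparkSessionManaged,
                spark -> System.out.println("Spark version: " + spark.version()));
        }
    }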

View File

@@ -4,6 +4,7 @@ package eu.dnetlib.dhp.oa.graph.dump;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
+
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
@@ -16,7 +17,9 @@ import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import com.fasterxml.jackson.databind.ObjectMapper;
+
 import eu.dnetlib.dhp.oa.graph.dump.community.CommunitySplit;
 import eu.dnetlib.dhp.schema.dump.oaf.community.CommunityResult;

View File

@@ -38,7 +38,6 @@ public class ExtractRelationFromEntityTest {
     private static final Logger log = LoggerFactory
         .getLogger(ExtractRelationFromEntityTest.class);
-
     @BeforeAll
     public static void beforeAll() throws IOException {
         workingDir = Files
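The hunk cuts off in the middle of the workingDir assignment. The conventional setup in this module's tests is a throwaway temp directory created in beforeAll; a sketch of that pattern follows (the createTempDirectory call is an assumption, since the statement is truncated above):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    import org.junit.jupiter.api.BeforeAll;

    public class TempDirSetupExample {

        private static Path workingDir;

        @BeforeAll
        public static void beforeAll() throws IOException {
            // Create an isolated scratch directory for the test run.
            workingDir = Files
                .createTempDirectory(TempDirSetupExample.class.getSimpleName());
        }
    }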