refactoring

This commit is contained in:
Miriam Baglioni 2020-08-07 17:41:07 +02:00
parent 346a91f4d9
commit 9675af7965
1 changed file with 3 additions and 37 deletions

View File

@@ -8,8 +8,6 @@ import java.io.StringReader;
import java.util.*;
import java.util.stream.Collectors;
import javax.swing.text.html.Option;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
@@ -21,7 +19,6 @@ import org.dom4j.io.SAXReader;
import eu.dnetlib.dhp.oa.graph.dump.DumpProducts;
import eu.dnetlib.dhp.oa.graph.dump.Utils;
import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.dump.oaf.*;
import eu.dnetlib.dhp.schema.dump.oaf.graph.*;
@@ -36,7 +33,6 @@ public class DumpGraphEntities implements Serializable {
String outputPath,
Class<? extends OafEntity> inputClazz,
String communityMapPath) {
// CommunityMap communityMap) {
SparkConf conf = new SparkConf();
@@ -47,7 +43,6 @@ public class DumpGraphEntities implements Serializable {
.run(
isSparkSessionManaged, inputPath, outputPath, communityMapPath, inputClazz, Result.class,
true);
// d.run(isSparkSessionManaged, inputPath, outputPath, communityMap, inputClazz, Result.class, true);
break;
case "40":
runWithSparkSession(
@@ -109,13 +104,6 @@ public class DumpGraphEntities implements Serializable {
private static Datasource mapDatasource(eu.dnetlib.dhp.schema.oaf.Datasource d) {
Datasource datasource = new Datasource();
// Optional<eu.dnetlib.dhp.schema.oaf.Qualifier> odstype = Optional.ofNullable(d.getDatasourcetype());
//
// if(odstype.isPresent()){
// if (odstype.get().getClassid().equals(Constants.FUNDER_DS)){
// return null;
// }
// }
datasource.setId(d.getId());
@@ -296,17 +284,6 @@ public class DumpGraphEntities implements Serializable {
private static Project mapProject(eu.dnetlib.dhp.schema.oaf.Project p) {
Project project = new Project();
// project
// .setCollectedfrom(
// Optional
// .ofNullable(p.getCollectedfrom())
// .map(
// cf -> cf
// .stream()
// .map(coll -> KeyValue.newInstance(coll.getKey(), coll.getValue()))
// .collect(Collectors.toList()))
// .orElse(new ArrayList<>()));
Optional
.ofNullable(p.getId())
.ifPresent(id -> project.setId(id));
@@ -343,10 +320,6 @@ public class DumpGraphEntities implements Serializable {
.ofNullable(p.getKeywords())
.ifPresent(key -> project.setKeywords(key.getValue()));
// Optional
// .ofNullable(p.getDuration())
// .ifPresent(duration -> project.setDuration(duration.getValue()));
Optional<Field<String>> omandate = Optional.ofNullable(p.getOamandatepublications());
Optional<Field<String>> oecsc39 = Optional.ofNullable(p.getEcsc39());
boolean mandate = false;
@@ -437,26 +410,19 @@ public class DumpGraphEntities implements Serializable {
List<org.dom4j.Node> nodes = doc.selectNodes("//funding_level_" + level);
while (nodes.size() > 0) {
for (org.dom4j.Node n : nodes) {
// Levels funding_stream = new Levels();
// funding_stream.setLevel(String.valueOf(level));
// List node = n.selectNodes("./name");
// funding_stream.setName(((org.dom4j.Node)node.get(0)).getText());
List node = n.selectNodes("./id");
id = ((org.dom4j.Node) node.get(0)).getText();
id = id.substring(id.indexOf("::") + 2);
// funding_stream.setId(((org.dom4j.Node)node.get(0)).getText());
node = n.selectNodes("./description");
description += ((Node) node.get(0)).getText() + " - ";
// funding_stream.setDescription(((Node)node.get(0)).getText());
// fundings.add(funding_stream);
}
level += 1;
nodes = doc.selectNodes("//funding_level_" + level);
}
// if(fundings.size() > 0 ) {
// f.setFunding_levels(fundings);
// }
if (!id.equals("")) {
Fundings fundings = new Fundings();
fundings.setId(id);