forked from D-Net/dnet-hadoop
apply changes in D-Net/dnet-hadoop#40 (comment)
parent 69bbb9592a
commit 90b54d3efb
DumpGraphEntities.java

@@ -1,8 +1,3 @@
-/**
- * Dumps of entities in the model defined in eu.dnetlib.dhp.schema.dump.oaf.graph.
- * Results are dumped using the same Mapper as for eu.dnetlib.dhp.schema.dump.oaf.community, while for
- * the other entities the mapping is defined below
- */
 
 package eu.dnetlib.dhp.oa.graph.dump.graph;
 
@@ -31,6 +26,10 @@ import eu.dnetlib.dhp.schema.oaf.Field;
 import eu.dnetlib.dhp.schema.oaf.Journal;
 import eu.dnetlib.dhp.schema.oaf.OafEntity;
 
+/**
+ * Dumps of entities in the model defined in eu.dnetlib.dhp.schema.dump.oaf.graph. Results are dumped using the same
+ * Mapper as for eu.dnetlib.dhp.schema.dump.oaf.community, while for the other entities the mapping is defined below
+ */
 public class DumpGraphEntities implements Serializable {
 
 	public void run(Boolean isSparkSessionManaged,
@@ -285,7 +284,7 @@ public class DumpGraphEntities implements Serializable {
 		return c;
 	}
 
-	private static Project mapProject(eu.dnetlib.dhp.schema.oaf.Project p) {
+	private static Project mapProject(eu.dnetlib.dhp.schema.oaf.Project p) throws DocumentException {
 		Project project = new Project();
 
 		Optional
@@ -383,24 +382,24 @@ public class DumpGraphEntities implements Serializable {
 					.collect(Collectors.toList()))
 				.orElse(new ArrayList<>()));
 
-		project
-			.setFunding(
-				Optional
-					.ofNullable(p.getFundingtree())
-					.map(
-						value -> value
-							.stream()
-							.map(fundingtree -> getFunder(fundingtree.getValue()))
-							.collect(Collectors.toList()))
-					.orElse(new ArrayList<>()));
+		Optional<List<Field<String>>> ofundTree = Optional
+			.ofNullable(p.getFundingtree());
+		List<Funder> funList = new ArrayList<>();
+		if (ofundTree.isPresent()) {
+			for (Field<String> fundingtree : ofundTree.get()) {
+				funList.add(getFunder(fundingtree.getValue()));
+			}
+		}
+		project.setFunding(funList);
+
 		return project;
 	}
 
-	public static Funder getFunder(String fundingtree) {
+	public static Funder getFunder(String fundingtree) throws DocumentException {
 
 		Funder f = new Funder();
 		final Document doc;
-		try {
+
 		doc = new SAXReader().read(new StringReader(fundingtree));
 		f.setShortName(((org.dom4j.Node) (doc.selectNodes("//funder/shortname").get(0))).getText());
 		f.setName(((org.dom4j.Node) (doc.selectNodes("//funder/name").get(0))).getText());
@@ -435,10 +434,6 @@ public class DumpGraphEntities implements Serializable {
 		}
 
 		return f;
-		} catch (DocumentException e) {
-			e.printStackTrace();
-		}
-		return f;
 
 	}
 
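Note (not part of the commit): below is a minimal, self-contained sketch of the behaviour getFunder has after this change, i.e. the fundingtree XML is parsed with dom4j's SAXReader and the checked DocumentException now propagates to the caller instead of being caught and printed. The FunderParseSketch class, its reduced Funder holder and the sample XML fragment are hypothetical stand-ins for the dnet-hadoop model classes and real funding tree records.

import java.io.StringReader;

import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Node;
import org.dom4j.io.SAXReader;

public class FunderParseSketch {

	// Hypothetical holder reduced to the two fields populated from the XML in this diff.
	static class Funder {
		String shortName;
		String name;
	}

	// Mirrors the refactored getFunder: the checked DocumentException is declared,
	// not caught and printed, so malformed fundingtree XML fails loudly upstream.
	static Funder getFunder(String fundingtree) throws DocumentException {
		Funder f = new Funder();
		Document doc = new SAXReader().read(new StringReader(fundingtree));
		f.shortName = ((Node) doc.selectNodes("//funder/shortname").get(0)).getText();
		f.name = ((Node) doc.selectNodes("//funder/name").get(0)).getText();
		return f;
	}

	public static void main(String[] args) throws DocumentException {
		// Hypothetical fundingtree fragment; real records carry more structure.
		String xml = "<fundingtree><funder>"
			+ "<shortname>EC</shortname>"
			+ "<name>European Commission</name>"
			+ "</funder></fundingtree>";
		Funder f = getFunder(xml);
		System.out.println(f.shortName + " / " + f.name);
	}
}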