package eu.dnetlib.dhp.oa.graph.dump;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.io.SAXReader;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;

import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.dhp.oa.graph.dump.graph.Constants;
import eu.dnetlib.dhp.schema.dump.oaf.graph.Node;
import eu.dnetlib.dhp.schema.dump.oaf.graph.RelType;
import eu.dnetlib.dhp.schema.dump.oaf.graph.Relation;
import eu.dnetlib.dhp.schema.dump.pidgraph.Entity;
import eu.dnetlib.dhp.utils.DHPUtils;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
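
/**
 * Utility methods shared by the graph dump workflow: removing and reading paths on HDFS, resolving the
 * ISLookUp service, building context identifiers, creating direct/inverse relation pairs and loading the
 * {@link CommunityMap}.
 */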
public class Utils {

	public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
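
	/**
	 * Removes the directory at the given path, using the Hadoop configuration of the supplied Spark session.
	 *
	 * @param spark the active SparkSession
	 * @param path  the path of the directory to remove
	 */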
	public static void removeOutputDir(SparkSession spark, String path) {
		HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
	}
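
	/**
	 * Reads a text file of newline-delimited JSON records and maps each line to an instance of the given class.
	 * A hypothetical usage, assuming a dump of {@link Relation} objects stored at inputPath:
	 *
	 * <pre>
	 * Dataset&lt;Relation&gt; rels = Utils.readPath(spark, inputPath, Relation.class);
	 * </pre>
	 *
	 * @param spark     the active SparkSession
	 * @param inputPath the path of the file to read
	 * @param clazz     the class each JSON record is deserialised into
	 * @return a typed Dataset of clazz instances
	 */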
	public static <R> Dataset<R> readPath(
		SparkSession spark, String inputPath, Class<R> clazz) {
		return spark
			.read()
			.textFile(inputPath)
			.map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
	}
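
	/**
	 * Creates an {@link ISLookUpService} client for the service exposed at the given URL.
	 *
	 * @param isLookUpUrl the URL of the ISLookUp service
	 * @return the lookup service client
	 */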
	public static ISLookUpService getIsLookUpService(String isLookUpUrl) {
		return ISLookupClientFactory.getLookUpService(isLookUpUrl);
	}
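
	/**
	 * Builds the identifier of a context entity in the form CONTEXT_ID|CONTEXT_NS_PREFIX::md5(id).
	 *
	 * @param id the original context identifier
	 * @return the formatted identifier
	 */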
	public static String getContextId(String id) {
		return String
			.format(
				"%s|%s::%s", Constants.CONTEXT_ID, Constants.CONTEXT_NS_PREFIX,
				DHPUtils.md5(id));
	}
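
	/**
	 * Loads the community map through Spark, reading the first line of the file at the given path and
	 * deserialising it from JSON.
	 *
	 * @param spark            the active SparkSession
	 * @param communityMapPath the path of the serialised community map
	 * @return the deserialised {@link CommunityMap}
	 */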
	public static CommunityMap getCommunityMap(SparkSession spark, String communityMapPath) {
		return new Gson().fromJson(spark.read().textFile(communityMapPath).collectAsList().get(0), CommunityMap.class);
	}
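
	/**
	 * Loads the community map by reading the serialised JSON directly from HDFS, without going through Spark.
	 *
	 * @param fileSystem       the file system the map is stored on
	 * @param communityMapPath the path of the serialised community map
	 * @return the deserialised {@link CommunityMap}
	 * @throws IOException if the file cannot be opened or read
	 */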
	public static CommunityMap readCommunityMap(FileSystem fileSystem, String communityMapPath) throws IOException {
		StringBuilder sb = new StringBuilder();
		try (BufferedReader br = new BufferedReader(
			new InputStreamReader(fileSystem.open(new Path(communityMapPath))))) {
			String line;
			while ((line = br.readLine()) != null) {
				sb.append(line);
			}
		}
		return new Gson().fromJson(sb.toString(), CommunityMap.class);
	}
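
	/**
	 * Builds the pair of symmetric relations between two nodes: one from pid1 to pid2 with name rel1, and the
	 * inverse from pid2 to pid1 with name rel2, both sharing the semantic type semtype.
	 *
	 * @param pid1    the identifier of the first node
	 * @param pid2    the identifier of the second node
	 * @param type1   the type of the first node
	 * @param type2   the type of the second node
	 * @param semtype the semantic type shared by the two relations
	 * @param rel1    the relation name from the first node to the second
	 * @param rel2    the inverse relation name from the second node to the first
	 * @return the list holding the two relations
	 */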
	public static List<Relation> getRelationPair(String pid1, String pid2, String type1, String type2,
		String semtype, String rel1, String rel2) {
		List<Relation> ret = new ArrayList<>();
		ret
			.add(
				Relation
					.newInstance(
						Node.newInstance(pid1, type1),
						Node.newInstance(pid2, type2),
						RelType.newInstance(rel1, semtype),
						null));

		ret
			.add(
				Relation
					.newInstance(
						Node.newInstance(pid2, type2),
						Node.newInstance(pid1, type1),
						RelType.newInstance(rel2, semtype),
						null));

		return ret;
	}
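
	/**
	 * Parses the XML serialisation of a funder and builds an {@link Entity} identified as shortname:code,
	 * where shortname is read from the //funder/shortname element.
	 *
	 * @param fund the XML serialisation of the funder
	 * @param code the code to append to the funder shortname
	 * @return the resulting {@link Entity}
	 * @throws DocumentException if the XML cannot be parsed
	 */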
	public static Entity getEntity(String fund, String code) throws DocumentException {
		final Document doc = new SAXReader().read(new StringReader(fund));
		final String name = ((org.dom4j.Node) doc.selectNodes("//funder/shortname").get(0)).getText();
		return Entity.newInstance(name + ":" + code);
	}
}