package eu.dnetlib.dhp.oa.dedup;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.action.AtomicAction;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import eu.dnetlib.pace.config.DedupConfig;
import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.util.MapDocumentUtil;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.dom4j.DocumentException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import scala.Tuple2;

import java.io.IOException;
import java.util.List;
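
/**
 * Spark action that creates the similarity relations (simrels) among the entities of the input
 * graph: for each dedup configuration bound to the given actionSetId it reads the corresponding
 * entities from graphBasePath, clusters them into blocks, compares the records within each block
 * and stores the resulting Relation dataset under workingPath.
 */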
public class SparkCreateSimRels extends AbstractSparkAction {

    private static final Logger log = LoggerFactory.getLogger(SparkCreateSimRels.class);

    public SparkCreateSimRels(ArgumentApplicationParser parser, SparkSession spark) {
        super(parser, spark);
    }
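
    /**
     * Reads the job arguments defined in createSimRels_parameters.json and triggers the simrel
     * creation, resolving the dedup configurations through the IS lookup service.
     */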
    public static void main(String[] args) throws Exception {
        ArgumentApplicationParser parser = new ArgumentApplicationParser(
                IOUtils.toString(
                        SparkCreateSimRels.class.getResourceAsStream(
                                "/eu/dnetlib/dhp/oa/dedup/createSimRels_parameters.json")));
        parser.parseArgument(args);

        new SparkCreateSimRels(parser, getSparkSession(parser))
                .run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
    }
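
    /**
     * For each dedup configuration associated to the actionSetId, reads the graph entities,
     * groups them into candidate blocks and saves the similarity relations produced by the
     * pairwise comparisons within each block.
     *
     * @param isLookUpService the IS lookup client used to fetch the dedup configurations
     */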
    @Override
    public void run(ISLookUpService isLookUpService) throws DocumentException, IOException, ISLookUpException {

        // read oozie parameters
        final String graphBasePath = parser.get("graphBasePath");
        final String isLookUpUrl = parser.get("isLookUpUrl");
        final String actionSetId = parser.get("actionSetId");
        final String workingPath = parser.get("workingPath");

        log.info("graphBasePath: '{}'", graphBasePath);
        log.info("isLookUpUrl:   '{}'", isLookUpUrl);
        log.info("actionSetId:   '{}'", actionSetId);
        log.info("workingPath:   '{}'", workingPath);

        final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

        // for each dedup configuration
        for (DedupConfig dedupConf : getConfigurations(isLookUpService, actionSetId)) {

            final String entity = dedupConf.getWf().getEntityType();
            final String subEntity = dedupConf.getWf().getSubEntityValue();
            log.info("Creating simrels for: '{}'", subEntity);
JavaPairRDD<String, MapDocument> mapDocument = sc.textFile(DedupUtility.createEntityPath(graphBasePath, subEntity))
.mapToPair((PairFunction<String, String, MapDocument>) s -> {
MapDocument d = MapDocumentUtil.asMapDocumentWithJPath(dedupConf, s);
return new Tuple2<String, MapDocument>(d.getIdentifier(), d);
});

            // create blocks for deduplication
            JavaPairRDD<String, List<MapDocument>> blocks = Deduper.createSortedBlocks(sc, mapDocument, dedupConf);

            // create relations by comparing only elements in the same group
            final JavaPairRDD<String, String> dedupRels = Deduper.computeRelations(sc, blocks, dedupConf);
JavaRDD<Relation> relationsRDD = dedupRels.map(r -> createSimRel(r._1(), r._2(), entity));

            // save the simrels in the working dir
            spark
                    .createDataset(relationsRDD.rdd(), Encoders.bean(Relation.class))
                    .write()
                    .mode("overwrite")
                    .save(DedupUtility.createSimRelPath(workingPath, actionSetId, subEntity));
        }
    }

    /**
     * Utility method used to create an atomic action from a Relation object.
     *
     * @param relation the input relation
     * @return a Tuple2 holding [id, json serialization of the atomic action]
     * @throws JsonProcessingException when the atomic action cannot be serialized to JSON
     */
public Tuple2<Text, Text> createSequenceFileRow(Relation relation) throws JsonProcessingException {
ObjectMapper mapper = new ObjectMapper();
String id = relation.getSource() + "@" + relation.getRelClass() + "@" + relation.getTarget();
AtomicAction<Relation> aa = new AtomicAction<>(Relation.class, relation);
return new Tuple2<>(
new Text(id),
new Text(mapper.writeValueAsString(aa))
);
}
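
    /**
     * Builds the similarity relation between two record identifiers; the relation semantics
     * (relClass) depends on the entity type driving the comparison.
     *
     * @param source the source record identifier
     * @param target the target record identifier
     * @param entity the entity type defined in the dedup configuration
     * @return the similarity relation
     */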
    public Relation createSimRel(String source, String target, String entity) {
        final Relation r = new Relation();
        r.setSource(source);
        r.setTarget(target);

        switch (entity) {
            case "result":
                r.setRelClass("resultResult_dedupSimilarity_isSimilarTo");
                break;
            case "organization":
                r.setRelClass("organizationOrganization_dedupSimilarity_isSimilarTo");
                break;
            default:
                r.setRelClass("isSimilarTo");
                break;
        }
        return r;
    }
}