minor changes

miconis 2021-03-19 16:57:40 +01:00
parent 1a85020572
commit 98854b0124
9 changed files with 56 additions and 384 deletions

View File: SparkCopyOpenorgs.java

@@ -68,12 +68,9 @@ public class SparkCopyOpenorgs extends AbstractSparkAction {
log.info("Copying openorgs to the working dir");
final String outputPath = DedupUtility.createDedupRecordPath(workingPath, actionSetId, subEntity);
removeOutputDir(spark, outputPath);
final String entityPath = DedupUtility.createEntityPath(graphBasePath, subEntity);
final Class<OafEntity> clazz = ModelSupport.entityTypes.get(EntityType.valueOf(subEntity));
filterOpenorgs(spark, entityPath)
.write()
.mode(SaveMode.Overwrite)
@@ -95,9 +92,13 @@ public class SparkCopyOpenorgs extends AbstractSparkAction {
.rdd(),
Encoders.bean(Organization.class));
entities.show();
log.info("Number of organization entities processed: {}", entities.count());
return entities.filter(entities.col("id").contains("openorgs____"));
entities = entities.filter(entities.col("id").contains("openorgs____"));
log.info("Number of Openorgs organization entities: {}", entities.count());
return entities;
}
}

View File: SparkCopyOpenorgsMergeRels.java

@@ -74,8 +74,6 @@ public class SparkCopyOpenorgsMergeRels extends AbstractSparkAction {
final String outputPath = DedupUtility.createMergeRelPath(workingPath, actionSetId, "organization");
removeOutputDir(spark, outputPath);
final String relationPath = DedupUtility.createEntityPath(graphBasePath, "relation");
DedupConfig dedupConf = getConfigurations(isLookUpService, actionSetId).get(0);
@@ -85,11 +83,13 @@ public class SparkCopyOpenorgsMergeRels extends AbstractSparkAction {
.textFile(relationPath)
.map(patchRelFn(), Encoders.bean(Relation.class))
.toJavaRDD()
.filter(this::isOpenorgs) // takes only relations coming from openorgs
.filter(this::filterOpenorgsRels) // takes only isSimilarTo relations between organizations from openorgs
.filter(this::excludeOpenorgsMesh) // excludes relations between an organization and an openorgsmesh
.filter(this::isOpenorgs)
.filter(this::filterOpenorgsRels)
.filter(this::excludeOpenorgsMesh)
.filter(this::excludeNonOpenorgs); // excludes relations with no openorgs id involved
log.info("Number of raw Openorgs Relations collected: {}", rawRels.count());
// turn openorgs isSimilarTo relations into mergerels
JavaRDD<Relation> mergeRelsRDD = rawRels.flatMap(rel -> {
List<Relation> mergerels = new ArrayList<>();
@@ -103,6 +103,8 @@ public class SparkCopyOpenorgsMergeRels extends AbstractSparkAction {
return mergerels.iterator();
});
log.info("Number of Openorgs Merge Relations created: {}", mergeRelsRDD.count());
spark
.createDataset(
mergeRelsRDD.rdd(),
@@ -134,7 +136,7 @@ public class SparkCopyOpenorgsMergeRels extends AbstractSparkAction {
if (rel.getCollectedfrom() != null) {
for (KeyValue k : rel.getCollectedfrom()) {
if (k.getValue().equals("OpenOrgs Database")) {
if (k.getValue() != null && k.getValue().equals("OpenOrgs Database")) {
return true;
}
}
@@ -144,7 +146,7 @@ public class SparkCopyOpenorgsMergeRels extends AbstractSparkAction {
private boolean excludeOpenorgsMesh(Relation rel) {
if (rel.getSource().equals("openorgsmesh") || rel.getTarget().equals("openorgsmesh")) {
if (rel.getSource().contains("openorgsmesh") || rel.getTarget().contains("openorgsmesh")) {
return false;
}
return true;
@@ -152,7 +154,7 @@ public class SparkCopyOpenorgsMergeRels extends AbstractSparkAction {
private boolean excludeNonOpenorgs(Relation rel) {
if (rel.getSource().equals("openorgs____") || rel.getTarget().equals("openorgs____")) {
if (rel.getSource().contains("openorgs____") || rel.getTarget().contains("openorgs____")) {
return true;
}
return false;

View File: SparkCopyRelationsNoOpenorgs.java

@@ -33,7 +33,7 @@ import scala.Tuple2;
public class SparkCopyRelationsNoOpenorgs extends AbstractSparkAction {
private static final Logger log = LoggerFactory.getLogger(SparkUpdateEntity.class);
private static final Logger log = LoggerFactory.getLogger(SparkCopyRelationsNoOpenorgs.class);
public SparkCopyRelationsNoOpenorgs(ArgumentApplicationParser parser, SparkSession spark) {
super(parser, spark);
@@ -52,7 +52,7 @@ public class SparkCopyRelationsNoOpenorgs extends AbstractSparkAction {
conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
conf.registerKryoClasses(ModelSupport.getOafModelClasses());
new SparkUpdateEntity(parser, getSparkSession(conf))
new SparkCopyRelationsNoOpenorgs(parser, getSparkSession(conf))
.run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
}
@@ -69,14 +69,14 @@ public class SparkCopyRelationsNoOpenorgs extends AbstractSparkAction {
final String relationPath = DedupUtility.createEntityPath(graphBasePath, "relation");
final String outputPath = DedupUtility.createEntityPath(dedupGraphPath, "relation");
removeOutputDir(spark, outputPath);
JavaRDD<Relation> simRels = spark
.read()
.textFile(relationPath)
.map(patchRelFn(), Encoders.bean(Relation.class))
.toJavaRDD()
.filter(this::excludeOpenorgsRels);
.filter(x -> !isOpenorgs(x));
log.info("Number of non-Openorgs relations collected: {}", simRels.count());
spark
.createDataset(simRels.rdd(), Encoders.bean(Relation.class))
@@ -96,15 +96,15 @@ public class SparkCopyRelationsNoOpenorgs extends AbstractSparkAction {
};
}
private boolean excludeOpenorgsRels(Relation rel) {
private boolean isOpenorgs(Relation rel) {
if (rel.getCollectedfrom() != null) {
for (KeyValue k : rel.getCollectedfrom()) {
if (k.getValue().equals("OpenOrgs Database")) {
return false;
if (k.getValue() != null && k.getValue().equals("OpenOrgs Database")) {
return true;
}
}
}
return true;
return false;
}
}

View File: SparkRemoveDiffRels.java (file deleted)

@@ -1,281 +0,0 @@
package eu.dnetlib.dhp.oa.dedup;
import static eu.dnetlib.dhp.oa.dedup.SparkCreateMergeRels.DNET_PROVENANCE_ACTIONS;
import static eu.dnetlib.dhp.oa.dedup.SparkCreateMergeRels.PROVENANCE_ACTION_CLASS;
import static eu.dnetlib.dhp.oa.dedup.SparkCreateMergeRels.hash;
import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.graphx.Edge;
import org.apache.spark.rdd.RDD;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.dom4j.DocumentException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Iterables;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.dedup.graph.ConnectedComponent;
import eu.dnetlib.dhp.oa.dedup.graph.GraphProcessor;
import eu.dnetlib.dhp.oa.dedup.model.Block;
import eu.dnetlib.dhp.schema.oaf.DataInfo;
import eu.dnetlib.dhp.schema.oaf.Qualifier;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import eu.dnetlib.pace.config.DedupConfig;
import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.util.MapDocumentUtil;
import scala.Tuple2;
public class SparkRemoveDiffRels extends AbstractSparkAction {
private static final Logger log = LoggerFactory.getLogger(SparkRemoveDiffRels.class);
public SparkRemoveDiffRels(ArgumentApplicationParser parser, SparkSession spark) {
super(parser, spark);
}
public static void main(String[] args) throws Exception {
ArgumentApplicationParser parser = new ArgumentApplicationParser(
IOUtils
.toString(
SparkCreateSimRels.class
.getResourceAsStream(
"/eu/dnetlib/dhp/oa/dedup/createSimRels_parameters.json")));
parser.parseArgument(args);
SparkConf conf = new SparkConf();
new SparkCreateSimRels(parser, getSparkSession(conf))
.run(ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl")));
}
@Override
public void run(ISLookUpService isLookUpService)
throws DocumentException, IOException, ISLookUpException {
// read oozie parameters
final String graphBasePath = parser.get("graphBasePath");
final String isLookUpUrl = parser.get("isLookUpUrl");
final String actionSetId = parser.get("actionSetId");
final String workingPath = parser.get("workingPath");
final int numPartitions = Optional
.ofNullable(parser.get("numPartitions"))
.map(Integer::valueOf)
.orElse(NUM_PARTITIONS);
log.info("numPartitions: '{}'", numPartitions);
log.info("graphBasePath: '{}'", graphBasePath);
log.info("isLookUpUrl: '{}'", isLookUpUrl);
log.info("actionSetId: '{}'", actionSetId);
log.info("workingPath: '{}'", workingPath);
// for each dedup configuration
for (DedupConfig dedupConf : getConfigurations(isLookUpService, actionSetId)) {
final String entity = dedupConf.getWf().getEntityType();
final String subEntity = dedupConf.getWf().getSubEntityValue();
log.info("Removing diffrels for: '{}'", subEntity);
final String mergeRelsPath = DedupUtility.createMergeRelPath(workingPath, actionSetId, subEntity);
final String relationPath = DedupUtility.createEntityPath(graphBasePath, subEntity);
final String openorgsMergeRelsPath = DedupUtility
.createOpenorgsMergeRelsPath(workingPath, actionSetId, subEntity);
final int maxIterations = dedupConf.getWf().getMaxIterations();
log.info("Max iterations {}", maxIterations);
JavaRDD<Relation> mergeRelsRDD = spark
.read()
.load(mergeRelsPath)
.as(Encoders.bean(Relation.class))
.where("relClass == 'merges'")
.toJavaRDD();
System.out.println("mergeRelsRDD = " + mergeRelsRDD.count());
// JavaRDD<Tuple2<Tuple2<String, String>, String>> diffRelsRDD = spark
// .read()
// .textFile(relationPath)
// .map(patchRelFn(), Encoders.bean(Relation.class))
// .toJavaRDD()
// .filter(r -> filterRels(r, entity))
// .map(rel -> {
// if (rel.getSource().compareTo(rel.getTarget()) < 0)
// return new Tuple2<>(new Tuple2<>(rel.getSource(), rel.getTarget()), "diffRel");
// else
// return new Tuple2<>(new Tuple2<>(rel.getTarget(), rel.getSource()), "diffRel");
// });
// THIS IS FOR TESTING PURPOSE
JavaRDD<Tuple2<Tuple2<String, String>, String>> diffRelsRDD = spark
.read()
.load(mergeRelsPath)
.as(Encoders.bean(Relation.class))
.toJavaRDD()
.map(rel -> {
if (rel.getSource().compareTo(rel.getTarget()) < 0)
return new Tuple2<>(new Tuple2<>(rel.getSource(), rel.getTarget()), "diffRel");
else
return new Tuple2<>(new Tuple2<>(rel.getTarget(), rel.getSource()), "diffRel");
})
.distinct();
System.out.println("diffRelsRDD = " + diffRelsRDD.count());
// JavaRDD<Tuple2<Tuple2<String, String>, String>> flatMergeRels = mergeRelsRDD
// .mapToPair(rel -> new Tuple2<>(rel.getSource(), rel.getTarget()))
// .groupByKey()
// .flatMap(g -> {
// List<Tuple2<Tuple2<String, String>, String>> rels = new ArrayList<>();
//
// List<String> ids = StreamSupport
// .stream(g._2().spliterator(), false)
// .collect(Collectors.toList());
//
// for (int i = 0; i < ids.size(); i++) {
// for (int j = i + 1; j < ids.size(); j++) {
// if (ids.get(i).compareTo(ids.get(j)) < 0)
// rels.add(new Tuple2<>(new Tuple2<>(ids.get(i), ids.get(j)), g._1()));
// else
// rels.add(new Tuple2<>(new Tuple2<>(ids.get(j), ids.get(i)), g._1()));
// }
// }
// return rels.iterator();
//
// });
JavaRDD<Tuple2<Tuple2<String, String>, String>> mergeRels = mergeRelsRDD
.map(rel -> {
if (rel.getSource().compareTo(rel.getTarget()) < 0)
return new Tuple2<>(new Tuple2<>(rel.getSource(), rel.getTarget()), "mergeRel");
else
return new Tuple2<>(new Tuple2<>(rel.getTarget(), rel.getSource()), "mergeRel");
});
System.out.println("mergeRelsProcessed = " + mergeRels.count());
// JavaRDD<Relation> purgedMergeRels = flatMergeRels
// .union(diffRelsRDD)
// .mapToPair(rel -> new Tuple2<>(rel._1(), Arrays.asList(rel._2())))
// .reduceByKey((a, b) -> {
// List<String> list = new ArrayList<String>();
// list.addAll(a);
// list.addAll(b);
// return list;
// })
// .filter(rel -> rel._2().size() == 1)
// .mapToPair(rel -> new Tuple2<>(rel._2().get(0), rel._1()))
// .flatMap(rel -> {
// List<Tuple2<String, String>> rels = new ArrayList<>();
// String source = rel._1();
// rels.add(new Tuple2<>(source, rel._2()._1()));
// rels.add(new Tuple2<>(source, rel._2()._2()));
// return rels.iterator();
// })
// .distinct()
// .flatMap(rel -> tupleToMergeRel(rel, dedupConf));
JavaRDD<Relation> purgedMergeRels = mergeRels
.union(diffRelsRDD)
.mapToPair(t -> new Tuple2<>(t._1()._1() + "|||" + t._1()._2(), t._2()))
.groupByKey()
.filter(g -> Iterables.size(g._2()) == 1)
.flatMap(
t -> tupleToMergeRel(
new Tuple2<>(t._1().split("\\|\\|\\|")[0], t._1().split("\\|\\|\\|")[1]),
dedupConf));
System.out.println("purgedMergeRels = " + purgedMergeRels.count());
spark
.createDataset(purgedMergeRels.rdd(), Encoders.bean(Relation.class))
.write()
.mode(SaveMode.Overwrite)
.json(openorgsMergeRelsPath);
}
}
private static MapFunction<String, Relation> patchRelFn() {
return value -> {
final Relation rel = OBJECT_MAPPER.readValue(value, Relation.class);
if (rel.getDataInfo() == null) {
rel.setDataInfo(new DataInfo());
}
return rel;
};
}
private boolean filterRels(Relation rel, String entityType) {
switch (entityType) {
case "result":
if (rel.getRelClass().equals("isDifferentFrom") && rel.getRelType().equals("resultResult")
&& rel.getSubRelType().equals("dedup"))
return true;
break;
case "organization":
if (rel.getRelClass().equals("isDifferentFrom") && rel.getRelType().equals("organizationOrganization")
&& rel.getSubRelType().equals("dedup"))
return true;
break;
default:
return false;
}
return false;
}
public Iterator<Relation> tupleToMergeRel(Tuple2<String, String> rel, DedupConfig dedupConf) {
List<Relation> rels = new ArrayList<>();
rels.add(rel(rel._1(), rel._2(), "merges", dedupConf));
rels.add(rel(rel._2(), rel._1(), "isMergedIn", dedupConf));
return rels.iterator();
}
private Relation rel(String source, String target, String relClass, DedupConfig dedupConf) {
String entityType = dedupConf.getWf().getEntityType();
Relation r = new Relation();
r.setSource(source);
r.setTarget(target);
r.setRelClass(relClass);
r.setRelType(entityType + entityType.substring(0, 1).toUpperCase() + entityType.substring(1));
r.setSubRelType("dedup");
DataInfo info = new DataInfo();
info.setDeletedbyinference(false);
info.setInferred(true);
info.setInvisible(false);
info.setInferenceprovenance(dedupConf.getWf().getConfigurationId());
Qualifier provenanceAction = new Qualifier();
provenanceAction.setClassid(PROVENANCE_ACTION_CLASS);
provenanceAction.setClassname(PROVENANCE_ACTION_CLASS);
provenanceAction.setSchemeid(DNET_PROVENANCE_ACTIONS);
provenanceAction.setSchemename(DNET_PROVENANCE_ACTIONS);
info.setProvenanceaction(provenanceAction);
// TODO calculate the trust value based on the similarity score of the elements in the CC
// info.setTrust();
r.setDataInfo(info);
return r;
}
}

View File (Oozie workflow XML)

@@ -87,8 +87,8 @@
<action name="resetOrgSimRels">
<fs>
<delete path="${workingPath}/${actionSetId}/organization_simrel"/>
<delete path="${workingPath}/${actionSetId}/organization_mergerel"/>
<delete path="${workingPath}/${actionSetIdOpenorgs}/organization_simrel"/>
<delete path="${workingPath}/${actionSetIdOpenorgs}/organization_mergerel"/>
</fs>
<ok to="CreateSimRels"/>
<error to="Kill"/>
@@ -113,7 +113,7 @@
</spark-opts>
<arg>--graphBasePath</arg><arg>${graphBasePath}</arg>
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
<arg>--actionSetId</arg><arg>${actionSetId}</arg>
<arg>--actionSetId</arg><arg>${actionSetIdOpenorgs}</arg>
<arg>--workingPath</arg><arg>${workingPath}</arg>
<arg>--numPartitions</arg><arg>8000</arg>
</spark>
@@ -142,7 +142,7 @@
<arg>--graphBasePath</arg><arg>${graphBasePath}</arg>
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
<arg>--workingPath</arg><arg>${workingPath}</arg>
<arg>--actionSetId</arg><arg>${actionSetId}</arg>
<arg>--actionSetId</arg><arg>${actionSetIdOpenorgs}</arg>
<arg>--numPartitions</arg><arg>8000</arg>
</spark>
<ok to="CreateMergeRels"/>
@@ -169,7 +169,7 @@
<arg>--graphBasePath</arg><arg>${graphBasePath}</arg>
<arg>--workingPath</arg><arg>${workingPath}</arg>
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
<arg>--actionSetId</arg><arg>${actionSetId}</arg>
<arg>--actionSetId</arg><arg>${actionSetIdOpenorgs}</arg>
<arg>--cutConnectedComponent</arg><arg>${cutConnectedComponent}</arg>
</spark>
<ok to="PrepareOrgRels"/>
@@ -196,7 +196,7 @@
<arg>--graphBasePath</arg><arg>${graphBasePath}</arg>
<arg>--workingPath</arg><arg>${workingPath}</arg>
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
<arg>--actionSetId</arg><arg>${actionSetId}</arg>
<arg>--actionSetId</arg><arg>${actionSetIdOpenorgs}</arg>
<arg>--dbUrl</arg><arg>${dbUrl}</arg>
<arg>--dbTable</arg><arg>${dbTable}</arg>
<arg>--dbUser</arg><arg>${dbUser}</arg>
@@ -227,7 +227,7 @@
<arg>--graphBasePath</arg><arg>${graphBasePath}</arg>
<arg>--workingPath</arg><arg>${workingPath}</arg>
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
<arg>--actionSetId</arg><arg>${actionSetId}</arg>
<arg>--actionSetId</arg><arg>${actionSetIdOpenorgs}</arg>
<arg>--apiUrl</arg><arg>${apiUrl}</arg>
<arg>--dbUrl</arg><arg>${dbUrl}</arg>
<arg>--dbTable</arg><arg>${dbTable}</arg>

View File (Oozie workflow XML)

@@ -92,11 +92,23 @@
<action name="resetWorkingPath">
<fs>
<delete path="${workingPath}"/>
<delete path="${dedupGraphPath}"/>
</fs>
<ok to="CreateSimRel"/>
<error to="Kill"/>
</action>
<!--<action name="testOpenorgs">-->
<!--<fs>-->
<!--<delete path="${workingPath}/${actionSetIdOpenorgs}/organization_simrel"/>-->
<!--<delete path="${workingPath}/${actionSetIdOpenorgs}/organization_mergerel"/>-->
<!--<delete path="${workingPath}/${actionSetIdOpenorgs}/organization_deduprecord"/>-->
<!--<delete path="${dedupGraphPath}"/>-->
<!--</fs>-->
<!--<ok to="CopyOpenorgsMergeRels"/>-->
<!--<error to="Kill"/>-->
<!--</action>-->
<action name="CreateSimRel">
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
@@ -182,7 +194,7 @@
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Copy Merge Relations</name>
<name>Copy Openorgs Merge Relations</name>
<class>eu.dnetlib.dhp.oa.dedup.SparkCopyOpenorgsMergeRels</class>
<jar>dhp-dedup-openaire-${projectVersion}.jar</jar>
<spark-opts>
@@ -201,7 +213,7 @@
<arg>--actionSetId</arg><arg>${actionSetIdOpenorgs}</arg>
<arg>--numPartitions</arg><arg>8000</arg>
</spark>
<ok to="CopyEntities"/>
<ok to="CopyOpenorgs"/>
<error to="Kill"/>
</action>
@@ -210,7 +222,7 @@
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Copy Entities</name>
<name>Copy Openorgs Entities</name>
<class>eu.dnetlib.dhp.oa.dedup.SparkCopyOpenorgs</class>
<jar>dhp-dedup-openaire-${projectVersion}.jar</jar>
<spark-opts>
@@ -225,7 +237,6 @@
</spark-opts>
<arg>--graphBasePath</arg><arg>${graphBasePath}</arg>
<arg>--workingPath</arg><arg>${workingPath}</arg>
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
<arg>--actionSetId</arg><arg>${actionSetIdOpenorgs}</arg>
</spark>
<ok to="UpdateEntity"/>
@@ -262,7 +273,7 @@
<spark xmlns="uri:oozie:spark-action:0.2">
<master>yarn</master>
<mode>cluster</mode>
<name>Update Entity</name>
<name>Copy Non-Openorgs Relations</name>
<class>eu.dnetlib.dhp.oa.dedup.SparkCopyRelationsNoOpenorgs</class>
<jar>dhp-dedup-openaire-${projectVersion}.jar</jar>
<spark-opts>

View File: SparkOpenorgsTest.java

@@ -19,9 +19,11 @@ import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.ForeachFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.*;
import org.junit.jupiter.api.extension.ExtendWith;
@@ -61,7 +63,7 @@ public class SparkOpenorgsTest implements Serializable {
public static void cleanUp() throws IOException, URISyntaxException {
testGraphBasePath = Paths
.get(SparkOpenorgsTest.class.getResource("/eu/dnetlib/dhp/dedup/entities").toURI())
.get(SparkOpenorgsTest.class.getResource("/eu/dnetlib/dhp/dedup/openorgs").toURI())
.toFile()
.getAbsolutePath();
testOutputBasePath = createTempDirectory(SparkOpenorgsTest.class.getSimpleName() + "-")
@@ -71,9 +73,8 @@ public class SparkOpenorgsTest implements Serializable {
.toAbsolutePath()
.toString();
// FileUtils.deleteDirectory(new File(testOutputBasePath));
FileUtils.deleteDirectory(new File(testOutputBasePath));
FileUtils.deleteDirectory(new File(testDedupGraphBasePath));
FileUtils.deleteDirectory(new File("/tmp/test-orchestrator/organization_openorgs_mergerels"));
final SparkConf conf = new SparkConf();
conf.set("spark.sql.shuffle.partitions", "200");
@@ -133,7 +134,7 @@ public class SparkOpenorgsTest implements Serializable {
.textFile(testOutputBasePath + "/" + testActionSetId + "/organization_deduprecord")
.count();
assertEquals(0, orgs_deduprecord);
assertEquals(100, orgs_deduprecord);
}
@Test
@@ -161,7 +162,7 @@ public class SparkOpenorgsTest implements Serializable {
.load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel")
.count();
assertEquals(0, orgs_mergerel);
assertEquals(6, orgs_mergerel);
}
@@ -190,67 +191,7 @@ public class SparkOpenorgsTest implements Serializable {
.textFile(testOutputBasePath + "/" + testActionSetId + "/organization_simrel")
.count();
System.out.println("orgs_simrel = " + orgs_simrel);
}
@Test
public void createSimRelsTest() throws Exception {
ArgumentApplicationParser parser = new ArgumentApplicationParser(
IOUtils
.toString(
SparkCreateSimRels.class
.getResourceAsStream(
"/eu/dnetlib/dhp/oa/dedup/createSimRels_parameters.json")));
parser
.parseArgument(
new String[] {
"-i", testGraphBasePath,
"-asi", testActionSetId,
"-la", "lookupurl",
"-w", "/tmp",
"-np", "50"
});
new SparkCreateSimRels(parser, spark).run(isLookUpService);
long orgs_simrel = spark
.read()
.textFile("/tmp/" + testActionSetId + "/organization_simrel")
.count();
assertEquals(3082, orgs_simrel);
}
@Test
public void createMergeRelsTest() throws Exception {
ArgumentApplicationParser parser = new ArgumentApplicationParser(
IOUtils
.toString(
SparkCreateMergeRels.class
.getResourceAsStream(
"/eu/dnetlib/dhp/oa/dedup/createCC_parameters.json")));
parser
.parseArgument(
new String[] {
"-i",
testGraphBasePath,
"-asi",
testActionSetId,
"-la",
"lookupurl",
"-w",
"/tmp"
});
new SparkCreateMergeRels(parser, spark).run(isLookUpService);
long orgs_mergerel = spark
.read()
.load("/tmp/" + testActionSetId + "/organization_mergerel")
.count();
assertEquals(1272, orgs_mergerel);
assertEquals(96, orgs_simrel);
}
@Test
@@ -273,9 +214,7 @@ public class SparkOpenorgsTest implements Serializable {
long relations = jsc.textFile(testDedupGraphBasePath + "/relation").count();
// Dataset<Relation> relsRDD = spark.read().textFile(testDedupGraphBasePath + "/relation").map(patchRelFn(), Encoders.bean(Relation.class));
assertEquals(500, relations);
assertEquals(400, relations);
}
@AfterAll