structure of SparkCreateSimRels changed to be compliant with Mockito testing

miconis 2020-04-02 18:37:53 +02:00
parent bfa5bc74df
commit a61763d149
6 changed files with 184 additions and 99 deletions
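
The restructuring moves the ISLookUpService dependency behind the constructor of the new AbstractSparkAction, so a test can inject a serializable Mockito mock instead of building a live IS lookup client inside the job. A minimal sketch of that pattern is below; the class name, the /tmp paths, and the inlined wiring are illustrative, but the stubbed resources and the constructor call mirror the test introduced by this commit.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.withSettings;

import org.apache.commons.io.IOUtils;
import org.mockito.Mockito;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.oa.dedup.SparkCreateSimRels;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

public class SimRelsMockSketch { // hypothetical class name, not part of the commit
    public static void main(String[] args) throws Exception {
        // serializable mock, since Spark closures may capture the lookup service
        ISLookUpService isLookUpService = mock(ISLookUpService.class, withSettings().serializable());

        // stub the orchestrator profile and the two dedup configuration profiles with test resources
        when(isLookUpService.getResourceProfileByQuery(Mockito.contains("test-orchestrator")))
                .thenReturn(IOUtils.toString(SimRelsMockSketch.class.getResourceAsStream("/eu/dnetlib/dhp/dedup/profiles/mock_orchestrator.xml")));
        when(isLookUpService.getResourceProfileByQuery(Mockito.contains("organization")))
                .thenReturn(IOUtils.toString(SimRelsMockSketch.class.getResourceAsStream("/eu/dnetlib/dhp/dedup/conf/org.curr.conf.json")));
        when(isLookUpService.getResourceProfileByQuery(Mockito.contains("publication")))
                .thenReturn(IOUtils.toString(SimRelsMockSketch.class.getResourceAsStream("/eu/dnetlib/dhp/dedup/conf/pub.curr.conf.json")));

        // parse the job arguments the same way the production main() does
        ArgumentApplicationParser parser = new ArgumentApplicationParser(
                IOUtils.toString(SparkCreateSimRels.class.getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/createSimRels_parameters.json")));
        parser.parseArgument(new String[]{
                "-mt", "local[*]",
                "-i", "/tmp/graph",          // illustrative input path
                "-asi", "test-orchestrator",
                "-la", "lookupurl",
                "-w", "/tmp/working"});      // illustrative working path

        // the mock is injected where the production entry point would build a real IS lookup client
        new SparkCreateSimRels(parser, isLookUpService).run();
    }
}

With getResourceProfileByQuery stubbed, getConfigurations resolves the action set and the per-entity DedupConfig profiles without contacting a real IS service.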


@@ -0,0 +1,83 @@
package eu.dnetlib.dhp.oa.dedup;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import eu.dnetlib.pace.config.DedupConfig;
import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import scala.xml.Elem;
import java.io.IOException;
import java.io.Serializable;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
abstract class AbstractSparkAction implements Serializable {
public ArgumentApplicationParser parser; //parameters for the spark action
public ISLookUpService isLookUpService; //lookup service to take dedupconfig
public AbstractSparkAction(ArgumentApplicationParser parser, ISLookUpService isLookUpService) throws Exception {
this.parser = parser;
this.isLookUpService = isLookUpService;
}
public List<DedupConfig> getConfigurations(String orchestrator) throws ISLookUpException, DocumentException, IOException {
final String xquery = String.format("/RESOURCE_PROFILE[.//DEDUPLICATION/ACTION_SET/@id = '%s']", orchestrator);
String orchestratorProfile = isLookUpService.getResourceProfileByQuery(xquery);
final Document doc = new SAXReader().read(new StringReader(orchestratorProfile));
final String actionSetId = doc.valueOf("//DEDUPLICATION/ACTION_SET/@id");
final List<DedupConfig> configurations = new ArrayList<>();
for (final Object o : doc.selectNodes("//SCAN_SEQUENCE/SCAN")) {
configurations.add(loadConfig(isLookUpService, actionSetId, o));
}
return configurations;
}
public DedupConfig loadConfig(final ISLookUpService isLookUpService, final String actionSetId, final Object o)
throws ISLookUpException, IOException {
final Element s = (Element) o;
final String configProfileId = s.attributeValue("id");
final String conf =
isLookUpService.getResourceProfileByQuery(String.format(
"for $x in /RESOURCE_PROFILE[.//RESOURCE_IDENTIFIER/@value = '%s'] return $x//DEDUPLICATION/text()",
configProfileId));
DedupConfig dedupConfig = new ObjectMapper().readValue(conf, DedupConfig.class);
dedupConfig.getPace().initModel();
dedupConfig.getPace().initTranslationMap();
dedupConfig.getWf().setConfigurationId(actionSetId);
return dedupConfig;
}
abstract void run() throws DocumentException, IOException, ISLookUpException;
protected SparkSession getSparkSession(ArgumentApplicationParser parser) {
SparkConf conf = new SparkConf();
return SparkSession
.builder()
.appName(SparkCreateSimRels.class.getSimpleName())
.master(parser.get("master"))
.config(conf)
.getOrCreate();
}
}


@@ -28,25 +28,31 @@ import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import scala.Tuple2;
+import java.io.IOException;
import java.io.Serializable;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
-public class SparkCreateSimRels implements Serializable {
+public class SparkCreateSimRels extends AbstractSparkAction {
private static final Log log = LogFactory.getLog(SparkCreateSimRels.class);
+public SparkCreateSimRels(ArgumentApplicationParser parser, ISLookUpService isLookUpService) throws Exception {
+super(parser, isLookUpService);
+}
public static void main(String[] args) throws Exception {
-final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+ArgumentApplicationParser parser = new ArgumentApplicationParser(
IOUtils.toString(
SparkCreateSimRels.class.getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/createSimRels_parameters.json")));
parser.parseArgument(args);
-new SparkCreateSimRels().run(parser);
+new SparkCreateSimRels(parser, ISLookupClientFactory.getLookUpService(parser.get("isLookUpUrl"))).run();
}
-private void run(ArgumentApplicationParser parser) throws ISLookUpException, DocumentException {
+@Override
+public void run() throws DocumentException, IOException, ISLookUpException {
//read oozie parameters
final String graphBasePath = parser.get("graphBasePath");
@@ -59,38 +65,35 @@ public class SparkCreateSimRels implements Serializable {
System.out.println(String.format("actionSetId: '%s'", actionSetId));
System.out.println(String.format("workingPath: '%s'", workingPath));
-List<DedupConfig> configurations = getConfigurations(isLookUpUrl, actionSetId);
-System.out.println("configurations = " + configurations.size());
-// try (SparkSession spark = getSparkSession(parser)) {
-// final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
-//
-// //for each dedup configuration
-// for (DedupConfig dedupConf: getConfigurations(isLookUpUrl, actionSetId)) {
-// final String entity = dedupConf.getWf().getEntityType();
-// final String subEntity = dedupConf.getWf().getSubEntityValue();
-//
-// JavaPairRDD<String, MapDocument> mapDocument = sc.textFile(DedupUtility.createEntityPath(graphBasePath, subEntity))
-// .mapToPair((PairFunction<String, String, MapDocument>) s -> {
-// MapDocument d = MapDocumentUtil.asMapDocumentWithJPath(dedupConf, s);
-// return new Tuple2<String, MapDocument>(d.getIdentifier(), d);
-// });
-//
-// //create blocks for deduplication
-// JavaPairRDD<String, List<MapDocument>> blocks = Deduper.createsortedBlocks(sc, mapDocument, dedupConf);
-//
-// //create relations by comparing only elements in the same group
-// final JavaPairRDD<String, String> dedupRels = Deduper.computeRelations2(sc, blocks, dedupConf);
-//
-// JavaRDD<Relation> relationsRDD = dedupRels.map(r -> createSimRel(r._1(), r._2(), entity));
-//
-// //save the simrel in the workingdir
-// spark.createDataset(relationsRDD.rdd(), Encoders.bean(Relation.class))
-// .write()
-// .mode("overwrite")
-// .save(DedupUtility.createSimRelPath(workingPath, actionSetId, subEntity));
-// }
-// }
+try (SparkSession spark = getSparkSession(parser)) {
+final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+//for each dedup configuration
+for (DedupConfig dedupConf: getConfigurations(actionSetId)) {
+final String entity = dedupConf.getWf().getEntityType();
+final String subEntity = dedupConf.getWf().getSubEntityValue();
+JavaPairRDD<String, MapDocument> mapDocument = sc.textFile(DedupUtility.createEntityPath(graphBasePath, subEntity))
+.mapToPair((PairFunction<String, String, MapDocument>) s -> {
+MapDocument d = MapDocumentUtil.asMapDocumentWithJPath(dedupConf, s);
+return new Tuple2<String, MapDocument>(d.getIdentifier(), d);
+});
+//create blocks for deduplication
+JavaPairRDD<String, List<MapDocument>> blocks = Deduper.createsortedBlocks(sc, mapDocument, dedupConf);
+//create relations by comparing only elements in the same group
+final JavaPairRDD<String, String> dedupRels = Deduper.computeRelations2(sc, blocks, dedupConf);
+JavaRDD<Relation> relationsRDD = dedupRels.map(r -> createSimRel(r._1(), r._2(), entity));
+//save the simrel in the workingdir
+spark.createDataset(relationsRDD.rdd(), Encoders.bean(Relation.class))
+.write()
+.mode("overwrite")
+.save(DedupUtility.createSimRelPath(workingPath, actionSetId, subEntity));
+}
+}
}
/**
@@ -130,49 +133,4 @@ public class SparkCreateSimRels implements Serializable {
}
return r;
}
-private static SparkSession getSparkSession(ArgumentApplicationParser parser) {
-SparkConf conf = new SparkConf();
-return SparkSession
-.builder()
-.appName(SparkCreateSimRels.class.getSimpleName())
-.master(parser.get("master"))
-.config(conf)
-.getOrCreate();
-}
-public List<DedupConfig> getConfigurations(String isLookUpUrl, String orchestrator) throws ISLookUpException, DocumentException {
-final ISLookUpService isLookUpService = ISLookupClientFactory.getLookUpService(isLookUpUrl);
-final String xquery = String.format("/RESOURCE_PROFILE[.//DEDUPLICATION/ACTION_SET/@id = '%s']", orchestrator);
-String orchestratorProfile = isLookUpService.getResourceProfileByQuery(xquery);
-final Document doc = new SAXReader().read(new StringReader(orchestratorProfile));
-final String actionSetId = doc.valueOf("//DEDUPLICATION/ACTION_SET/@id");
-final List<DedupConfig> configurations = new ArrayList<>();
-for (final Object o : doc.selectNodes("//SCAN_SEQUENCE/SCAN")) {
-configurations.add(loadConfig(isLookUpService, actionSetId, o));
-}
-return configurations;
-}
-public DedupConfig loadConfig(final ISLookUpService isLookUpService, final String actionSetId, final Object o)
-throws ISLookUpException {
-final Element s = (Element) o;
-final String configProfileId = s.attributeValue("id");
-final String conf =
-isLookUpService.getResourceProfileByQuery(String.format(
-"for $x in /RESOURCE_PROFILE[.//RESOURCE_IDENTIFIER/@value = '%s'] return $x//DEDUPLICATION/text()",
-configProfileId));
-final DedupConfig dedupConfig = DedupConfig.load(conf);
-dedupConfig.getWf().setConfigurationId(actionSetId);
-return dedupConfig;
-}
}


@@ -1,4 +1,4 @@
-package eu.dnetlib.dhp.oa.dedup.dedup;
+package eu.dnetlib.dhp.oa.dedup;
import eu.dnetlib.dhp.oa.dedup.DedupUtility;
import eu.dnetlib.dhp.schema.oaf.Publication;


@@ -1,56 +1,67 @@
-package eu.dnetlib.dhp.oa.dedup.dedup;
+package eu.dnetlib.dhp.oa.dedup;
import com.google.common.collect.Lists;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
-import eu.dnetlib.dhp.oa.dedup.SparkCreateSimRels;
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
+import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
import eu.dnetlib.pace.config.DedupConfig;
import org.apache.commons.io.IOUtils;
import org.dom4j.DocumentException;
-import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
+import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import java.io.IOException;
import java.util.List;
import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.withSettings;
@ExtendWith(MockitoExtension.class)
public class SparkDedupTest {
-@Mock
-SparkCreateSimRels sparkCreateSimRels;
-public List<DedupConfig> prepareConfigurations() throws IOException {
-return Lists.newArrayList(
-DedupConfig.load(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/dedup/conf/org.curr.conf.json"))),
-DedupConfig.load(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/dedup/conf/org.curr.conf.json")))
-);
-}
+ISLookUpService isLookUpService = mock(ISLookUpService.class, withSettings().serializable());
@BeforeEach
public void setUp() throws IOException, ISLookUpException, DocumentException {
-when(sparkCreateSimRels.getConfigurations(anyString(), anyString())).thenReturn(prepareConfigurations());
+withSettings().serializable();
+when(isLookUpService.getResourceProfileByQuery(Mockito.contains("test-orchestrator")))
+.thenReturn(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/dedup/profiles/mock_orchestrator.xml")));
+when(isLookUpService.getResourceProfileByQuery(Mockito.contains("organization")))
+.thenReturn(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/dedup/conf/org.curr.conf.json")));
+when(isLookUpService.getResourceProfileByQuery(Mockito.contains("publication")))
+.thenReturn(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/dedup/conf/pub.curr.conf.json")));
}
@Test
public void createSimRelsTest() throws Exception {
-SparkCreateSimRels.main(new String[]{
+ArgumentApplicationParser parser = new ArgumentApplicationParser(
+IOUtils.toString(
+SparkCreateSimRels.class.getResourceAsStream("/eu/dnetlib/dhp/oa/dedup/createSimRels_parameters.json")));
+parser.parseArgument(new String[]{
"-mt", "local[*]",
"-i", "/Users/miconis/dumps",
-"-asi", "dedup-similarity-result-levenstein",
+"-asi", "test-orchestrator",
"-la", "lookupurl",
-"-w", "workingPath"
-});
+"-w", "workingPath"});
+new SparkCreateSimRels(parser, isLookUpService).run();
}
// @Disabled("must be parametrized to run locally")
@@ -93,4 +104,12 @@ public class SparkDedupTest {
System.out.println(s2.hashCode());
System.out.println(hashFunction.hashString(s2).asLong());
}
+public List<DedupConfig> prepareConfigurations() throws IOException {
+return Lists.newArrayList(
+DedupConfig.load(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/dedup/conf/org.curr.conf.json"))),
+DedupConfig.load(IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/dedup/conf/org.curr.conf.json")))
+);
+}
}


@@ -1,4 +1,4 @@
-package eu.dnetlib.dhp.oa.dedup.dedup.jpath;
+package eu.dnetlib.dhp.oa.dedup.jpath;
import eu.dnetlib.pace.config.DedupConfig;
import eu.dnetlib.pace.model.MapDocument;


@@ -0,0 +1,25 @@
<RESOURCE_PROFILE>
<HEADER>
<RESOURCE_IDENTIFIER value=""/>
<RESOURCE_TYPE value="DedupOrchestrationDSResourceType"/>
<RESOURCE_KIND value="DedupOrchestrationDSResources"/>
<RESOURCE_URI value=""/>
<DATE_OF_CREATION value="2001-12-31T12:00:00"/>
</HEADER>
<BODY>
<CONFIGURATION enabled="true">
<DEDUPLICATION>
<ENTITY code="20" label="Organization" name="organization"/>
<ACTION_SET id="test-orchestrator"/>
<SCAN_SEQUENCE>
<SCAN id="organization"/>
<SCAN id="publication"/>
</SCAN_SEQUENCE>
</DEDUPLICATION>
</CONFIGURATION>
<STATUS>
<LAST_UPDATE value="2001-12-31T12:00:00"/>
</STATUS>
<SECURITY_PARAMETERS>SECURITY_PARAMETERS</SECURITY_PARAMETERS>
</BODY>
</RESOURCE_PROFILE>
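
For reference, getConfigurations in the new AbstractSparkAction reads this profile with two XPath expressions: the ACTION_SET id becomes the configuration id, and each SCAN element names a dedup configuration profile to load, so the mock orchestrator above drives one organization pass and one publication pass. A small standalone sketch of that parsing follows; the class name and the inlined XML string are illustrative, mirroring the resource above.

import java.io.StringReader;

import org.dom4j.Document;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;

public class OrchestratorProfileSketch { // hypothetical class name, for illustration only
    public static void main(String[] args) throws Exception {
        // the same structure as the mock_orchestrator.xml resource above, inlined for the sketch
        String profile =
                "<RESOURCE_PROFILE><BODY><CONFIGURATION enabled=\"true\"><DEDUPLICATION>"
                + "<ENTITY code=\"20\" label=\"Organization\" name=\"organization\"/>"
                + "<ACTION_SET id=\"test-orchestrator\"/>"
                + "<SCAN_SEQUENCE><SCAN id=\"organization\"/><SCAN id=\"publication\"/></SCAN_SEQUENCE>"
                + "</DEDUPLICATION></CONFIGURATION></BODY></RESOURCE_PROFILE>";

        Document doc = new SAXReader().read(new StringReader(profile));

        // same XPath expressions used by AbstractSparkAction.getConfigurations
        String actionSetId = doc.valueOf("//DEDUPLICATION/ACTION_SET/@id");
        System.out.println("actionSetId = " + actionSetId); // prints: test-orchestrator

        for (Object o : doc.selectNodes("//SCAN_SEQUENCE/SCAN")) {
            String configProfileId = ((Element) o).attributeValue("id");
            // each id feeds a separate getResourceProfileByQuery call that returns a DedupConfig profile
            System.out.println("load DedupConfig for: " + configProfileId);
        }
    }
}

In the test, these two SCAN ids are what make the stubbed "organization" and "publication" profile lookups fire.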