package eu.dnetlib.dhp.resulttocommunityfromsemrel;

import static org.apache.spark.sql.functions.desc;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.schema.oaf.Dataset;

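/**
 * Tests for {@link SparkResultToCommunityThroughSemRelJob}: verifies that community
 * contexts reachable through semantic relations are propagated to the results
 * produced by the job, running a local Spark session over the JSON samples in the
 * test resources.
 */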
public class ResultToCommunityJobTest {

	private static final Logger log = LoggerFactory.getLogger(ResultToCommunityJobTest.class);

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static final ClassLoader cl = ResultToCommunityJobTest.class.getClassLoader();

	private static SparkSession spark;

	private static Path workingDir;

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files.createTempDirectory(ResultToCommunityJobTest.class.getSimpleName());
		log.info("using work dir {}", workingDir);
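
		// local-mode Spark/Hive configuration; the warehouse is redirected to the temp working dir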
		SparkConf conf = new SparkConf();
		conf.setAppName(ResultToCommunityJobTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(ResultToCommunityJobTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Test
	public void test1() throws Exception {
		SparkResultToCommunityThroughSemRelJob
			.main(
				new String[] {
					"-isTest", Boolean.TRUE.toString(),
					"-isSparkSessionManaged", Boolean.FALSE.toString(),
					"-sourcePath", getClass()
						.getResource("/eu/dnetlib/dhp/resulttocommunityfromsemrel/sample")
						.getPath(),
					"-hive_metastore_uris", "",
					"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Dataset",
					"-outputPath", workingDir.toString() + "/dataset",
					"-preparedInfoPath", getClass()
						.getResource("/eu/dnetlib/dhp/resulttocommunityfromsemrel/preparedInfo")
						.getPath()
				});
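
		// read the job output back as JSON and map it onto the Dataset model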
		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<Dataset> tmp = sc
			.textFile(workingDir.toString() + "/dataset")
			.map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));

		Assertions.assertEquals(10, tmp.count());

		org.apache.spark.sql.Dataset<Dataset> verificationDataset = spark
			.createDataset(tmp.rdd(), Encoders.bean(Dataset.class));

		verificationDataset.createOrReplaceTempView("dataset");
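
		// communities added by the propagation are marked with inferenceprovenance = 'propagation'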
		String query = "select id, MyT.id community "
			+ "from dataset "
			+ "lateral view explode(context) c as MyT "
			+ "lateral view explode(MyT.datainfo) d as MyD "
			+ "where MyD.inferenceprovenance = 'propagation'";

		org.apache.spark.sql.Dataset<Row> resultExplodedProvenance = spark.sql(query);

		Assertions.assertEquals(5, resultExplodedProvenance.count());
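
		// per-result checks: the ids and expected communities come from the sample fixtures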
		Assertions
			.assertEquals(
				0,
				resultExplodedProvenance
					.filter("id = '50|dedup_wf_001::2305908abeca9da37eaf3bddcaf81b7b'")
					.count());

		Assertions
			.assertEquals(
				1,
				resultExplodedProvenance
					.filter("id = '50|dedup_wf_001::0489ae524201eedaa775da282dce35e7'")
					.count());
		Assertions
			.assertEquals(
				"dh-ch",
				resultExplodedProvenance
					.select("community")
					.where(
						resultExplodedProvenance
							.col("id")
							.equalTo("50|dedup_wf_001::0489ae524201eedaa775da282dce35e7"))
					.collectAsList()
					.get(0)
					.getString(0));

		Assertions
			.assertEquals(
				3,
				resultExplodedProvenance
					.filter("id = '50|dedup_wf_001::0a60e33b4f0986ebd9819451f2d87a28'")
					.count());
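
		// sort desc so the expected order of communities is deterministic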
		List<Row> rowList = resultExplodedProvenance
			.select("community")
			.where(
				resultExplodedProvenance
					.col("id")
					.equalTo("50|dedup_wf_001::0a60e33b4f0986ebd9819451f2d87a28"))
			.sort(desc("community"))
			.collectAsList();
		Assertions.assertEquals("mes", rowList.get(0).getString(0));
		Assertions.assertEquals("fam", rowList.get(1).getString(0));
		Assertions.assertEquals("ee", rowList.get(2).getString(0));

		Assertions
			.assertEquals(
				1,
				resultExplodedProvenance
					.filter("id = '50|dedup_wf_001::0ae02edb5598a5545d10b107fcf48dcc'")
					.count());
		Assertions
			.assertEquals(
				"aginfra",
				resultExplodedProvenance
					.select("community")
					.where(
						resultExplodedProvenance
							.col("id")
							.equalTo("50|dedup_wf_001::0ae02edb5598a5545d10b107fcf48dcc"))
					.collectAsList()
					.get(0)
					.getString(0));
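
		// same query without the provenance filter: pre-existing community contexts are included too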
		query = "select id, MyT.id community "
			+ "from dataset "
			+ "lateral view explode(context) c as MyT "
			+ "lateral view explode(MyT.datainfo) d as MyD ";

		org.apache.spark.sql.Dataset<Row> resultCommunityId = spark.sql(query);

		Assertions.assertEquals(10, resultCommunityId.count());
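
		// per-result checks now also count the communities already present in the input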
		Assertions
			.assertEquals(
				2,
				resultCommunityId
					.filter("id = '50|dedup_wf_001::0489ae524201eedaa775da282dce35e7'")
					.count());
		rowList = resultCommunityId
			.select("community")
			.where(
				resultCommunityId
					.col("id")
					.equalTo("50|dedup_wf_001::0489ae524201eedaa775da282dce35e7"))
			.sort(desc("community"))
			.collectAsList();
		Assertions.assertEquals("dh-ch", rowList.get(0).getString(0));
		Assertions.assertEquals("beopen", rowList.get(1).getString(0));

		Assertions
			.assertEquals(
				3,
				resultCommunityId
					.filter("id = '50|dedup_wf_001::0a60e33b4f0986ebd9819451f2d87a28'")
					.count());
		rowList = resultCommunityId
			.select("community")
			.where(
				resultCommunityId
					.col("id")
					.equalTo("50|dedup_wf_001::0a60e33b4f0986ebd9819451f2d87a28"))
			.sort(desc("community"))
			.collectAsList();
		Assertions.assertEquals("mes", rowList.get(0).getString(0));
		Assertions.assertEquals("fam", rowList.get(1).getString(0));
		Assertions.assertEquals("ee", rowList.get(2).getString(0));

		Assertions
			.assertEquals(
				2,
				resultCommunityId
					.filter("id = '50|dedup_wf_001::0ae02edb5598a5545d10b107fcf48dcc'")
					.count());
		rowList = resultCommunityId
			.select("community")
			.where(
				resultCommunityId
					.col("id")
					.equalTo("50|dedup_wf_001::0ae02edb5598a5545d10b107fcf48dcc"))
			.sort(desc("community"))
			.collectAsList();
		Assertions.assertEquals("beopen", rowList.get(0).getString(0));
		Assertions.assertEquals("aginfra", rowList.get(1).getString(0));

		Assertions
			.assertEquals(
				2,
				resultCommunityId
					.filter("id = '50|dedup_wf_001::2305908abeca9da37eaf3bddcaf81b7b'")
					.count());
		rowList = resultCommunityId
			.select("community")
			.where(
				resultCommunityId
					.col("id")
					.equalTo("50|dedup_wf_001::2305908abeca9da37eaf3bddcaf81b7b"))
			.sort(desc("community"))
			.collectAsList();
		Assertions.assertEquals("ni", rowList.get(0).getString(0));
		Assertions.assertEquals("euromarine", rowList.get(1).getString(0));

		Assertions
			.assertEquals(
				1,
				resultCommunityId
					.filter("id = '50|doajarticles::8d817039a63710fcf97e30f14662c6c8'")
					.count());
		Assertions
			.assertEquals(
				"euromarine",
				resultCommunityId
					.select("community")
					.where(
						resultCommunityId
							.col("id")
							.equalTo("50|doajarticles::8d817039a63710fcf97e30f14662c6c8"))
					.collectAsList()
					.get(0)
					.getString(0));
	}
}