[ENRICHMENT][BETA] Use of community API in enrichment process AND addition to tagging result for communities through projects #359
@@ -60,8 +60,6 @@ public class SparkResultToCommunityFromOrganizationJob {
 			conf,
 			isSparkSessionManaged,
 			spark -> {
-				// removeOutputDir(spark, outputPath);
-
 				execPropagation(spark, inputPath, outputPath, possibleupdatespath);
 
 			});
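For context, the hunk above sits inside the job's session-handling callback: the commented-out removeOutputDir call is dropped and the callback now only runs execPropagation. A minimal, self-contained sketch of that wrapper pattern, with an illustrative helper name and signature (not the project's actual API), looks like this:

import java.util.function.Consumer;

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

public class SparkSessionSupportSketch {

	// Illustrative helper: obtain a session from the given conf, hand it to the
	// callback, and stop it only when this helper is responsible for its lifecycle.
	public static void runWithSparkSession(
		SparkConf conf, Boolean isSparkSessionManaged, Consumer<SparkSession> fn) {
		SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
		try {
			fn.accept(spark);
		} finally {
			if (Boolean.TRUE.equals(isSparkSessionManaged)) {
				spark.stop();
			}
		}
	}
}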
@@ -40,7 +40,9 @@ public class BulkTagJobTest {
 		"\"fos\" : \"$['subject'][?(@['qualifier']['classid']=='FOS')].value\"," +
 		"\"sdg\" : \"$['subject'][?(@['qualifier']['classid']=='SDG')].value\"," +
 		"\"hostedby\" : \"$['instance'][*]['hostedby']['key']\" , " +
-		"\"collectedfrom\" : \"$['instance'][*]['collectedfrom']['key']\"} ";
+		"\"collectedfrom\" : \"$['instance'][*]['collectedfrom']['key']\"," +
+		"\"publisher\":\"$['publisher'].value\"," +
+		"\"publicationyear\":\"$['dateofacceptance'].value\"} ";
 
 	private static SparkSession spark;
 
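The two added entries extend the test's JsonPath map with publisher and publicationyear, alongside the existing fos, sdg, hostedby and collectedfrom paths. A small sketch of how such path expressions can be evaluated against a serialized record follows; the toy JSON and values are illustrative only:

import java.util.List;

import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.JsonPath;

public class PathMapSketch {

	public static void main(String[] args) {
		// Toy record trimmed to the fields addressed by the path map above.
		String json = "{\"publisher\": {\"value\": \"Some Publisher\"}, "
			+ "\"dateofacceptance\": {\"value\": \"2019-01-01\"}, "
			+ "\"subject\": [{\"value\": \"0101 mathematics\", "
			+ "\"qualifier\": {\"classid\": \"FOS\"}}]}";

		DocumentContext ctx = JsonPath.parse(json);

		// Definite paths return the single value, filter expressions return a list.
		String publisher = ctx.read("$['publisher'].value");
		String year = ctx.read("$['dateofacceptance'].value");
		List<String> fos = ctx.read("$['subject'][?(@['qualifier']['classid']=='FOS')].value");

		System.out.println(publisher + " / " + year + " / " + fos);
	}
}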
@@ -1,95 +0,0 @@
-
-package eu.dnetlib.dhp.resulttocommunityfromorganization;
-
-import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.commons.io.IOUtils;
-import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.JavaRDD;
-import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.sql.Encoders;
-import org.apache.spark.sql.SparkSession;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.gson.Gson;
-
-import eu.dnetlib.dhp.api.Utils;
-import eu.dnetlib.dhp.api.model.CommunityEntityMap;
-import eu.dnetlib.dhp.bulktag.BulkTagJobTest;
-import eu.dnetlib.dhp.bulktag.SparkBulkTagJob;
-import eu.dnetlib.dhp.schema.oaf.Dataset;
-
-/**
- * @author miriam.baglioni
- * @Date 13/10/23
- */
-public class PrepareAssocTest {
-
-	private static SparkSession spark;
-
-	private static Path workingDir;
-
-	private static final Logger log = LoggerFactory.getLogger(PrepareAssocTest.class);
-
-	@BeforeAll
-	public static void beforeAll() throws IOException {
-		workingDir = Files.createTempDirectory(BulkTagJobTest.class.getSimpleName());
-		log.info("using work dir {}", workingDir);
-
-		SparkConf conf = new SparkConf();
-		conf.setAppName(BulkTagJobTest.class.getSimpleName());
-
-		conf.setMaster("local[*]");
-		conf.set("spark.driver.host", "localhost");
-		conf.set("hive.metastore.local", "true");
-		conf.set("spark.ui.enabled", "false");
-		conf.set("spark.sql.warehouse.dir", workingDir.toString());
-		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
-
-		spark = SparkSession
-			.builder()
-			.appName(PrepareAssocTest.class.getSimpleName())
-			.config(conf)
-			.getOrCreate();
-	}
-
-	@AfterAll
-	public static void afterAll() throws IOException {
-		FileUtils.deleteDirectory(workingDir.toFile());
-		spark.stop();
-	}
-
-	@Test
-	void test1() throws Exception {
-
-		PrepareResultCommunitySet
-			.main(
-				new String[] {
-
-					"-isSparkSessionManaged", Boolean.FALSE.toString(),
-					"-sourcePath",
-					getClass().getResource("/eu/dnetlib/dhp/resulttocommunityfromorganization/relation/").getPath(),
-					"-outputPath", workingDir.toString() + "/prepared",
-					"-production", Boolean.TRUE.toString(),
-					"-hive_metastore_uris", ""
-				});
-
-		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
-
-		JavaRDD<ResultCommunityList> tmp = sc
-			.textFile(workingDir.toString() + "/prepared")
-			.map(item -> new ObjectMapper().readValue(item, ResultCommunityList.class));
-
-		tmp.foreach(r -> System.out.println(new ObjectMapper().writeValueAsString(r)));
-	}
-
-}
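The deleted PrepareAssocTest above exercised PrepareResultCommunitySet in local mode and read the prepared output back as a JavaRDD of ResultCommunityList via ObjectMapper. For reference, an equivalent typed read with the Dataset API could look like the following sketch (class and path come from the test above; the helper itself is illustrative):

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.resulttocommunityfromorganization.ResultCommunityList;

public class ReadPreparedInfoSketch {

	// Reads the newline-delimited JSON written under the prepared-info path into a
	// typed Dataset, assuming ResultCommunityList is a plain JavaBean.
	public static Dataset<ResultCommunityList> read(SparkSession spark, String inputPath) {
		return spark
			.read()
			.textFile(inputPath)
			.map(
				(MapFunction<String, ResultCommunityList>) value -> new ObjectMapper()
					.readValue(value, ResultCommunityList.class),
				Encoders.bean(ResultCommunityList.class));
	}
}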
@@ -72,15 +72,13 @@ public class ResultToCommunityJobTest {
 		SparkResultToCommunityFromOrganizationJob
 			.main(
 				new String[] {
-					"-isTest", Boolean.TRUE.toString(),
 					"-isSparkSessionManaged", Boolean.FALSE.toString(),
 					"-sourcePath", getClass()
-						.getResource("/eu/dnetlib/dhp/resulttocommunityfromorganization/sample")
+						.getResource("/eu/dnetlib/dhp/resulttocommunityfromorganization/sample/")
 						.getPath(),
-					"-hive_metastore_uris", "",
-					"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Dataset",
-					"-outputPath", workingDir.toString() + "/dataset",
+					"-outputPath", workingDir.toString() + "/",
 					"-preparedInfoPath", preparedInfoPath
 				});
 
 
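With -isTest, -hive_metastore_uris and -resultTableName removed, the test drives SparkResultToCommunityFromOrganizationJob through its remaining arguments only. A hedged sketch of the kind of follow-up check such a test typically performs on the propagated communities (helper name, output layout and community id are illustrative, not taken from the PR):

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.junit.jupiter.api.Assertions;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.schema.oaf.Dataset;

public class PropagationAssertSketch {

	// Reads the dataset output written by the job and checks that at least one
	// record gained a context entry for the given community id.
	public static void assertCommunityPropagated(JavaSparkContext sc, String outputPath, String communityId) {
		JavaRDD<Dataset> tmp = sc
			.textFile(outputPath)
			.map(item -> new ObjectMapper().readValue(item, Dataset.class));

		Assertions
			.assertTrue(
				tmp
					.filter(
						d -> d.getContext() != null
							&& d.getContext().stream().anyMatch(c -> c.getId().equals(communityId)))
					.count() > 0);
	}
}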
File diff suppressed because one or more lines are too long