[Enrichment single step] changed test for country propagation and fixed inconsistencies
commit 481c4e28d1
parent 2e6af7c655
@@ -80,10 +80,9 @@ public class SparkBulkTagJob {
 		runWithSparkSession(
 			conf,
 			isSparkSessionManaged,
-			spark -> {
-				removeOutputDir(spark, outputPath);
-				execBulkTag(spark, inputPath, outputPath, protoMappingParams, resultClazz, cc);
-			});
+			spark ->
+				execBulkTag(spark, inputPath, outputPath, protoMappingParams, resultClazz, cc)
+			);
 	}
 
 	private static <R extends Result> void execBulkTag(
@@ -16,6 +16,7 @@ import javax.print.attribute.DocAttributeSet;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.ForeachFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
@@ -34,6 +35,7 @@ import eu.dnetlib.dhp.bulktag.community.*;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.oaf.*;
 import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
+import scala.Tuple2;
 
 /**
  * @author miriam.baglioni
@@ -44,6 +46,11 @@ public class SparkEoscBulkTag implements Serializable {
 	private static final Logger log = LoggerFactory.getLogger(SparkEoscBulkTag.class);
 	public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 
+	private static String OPENAIRE_3 = "openaire3.0";
+	private static String OPENAIRE_4 = "openaire-pub_4.0";
+	private static String OPENAIRE_CRIS = "openaire-cris_1.1";
+	private static String OPENAIRE_DATA = "openaire2.0_data";
+
 	public static void main(String[] args) throws Exception {
 		String jsonConfiguration = IOUtils
 			.toString(
@@ -72,6 +79,9 @@ public class SparkEoscBulkTag implements Serializable {
 		final String resultClassName = parser.get("resultTableName");
 		log.info("resultTableName: {}", resultClassName);
 
+		final String resultType = parser.get("resultType");
+		log.info("resultType: {}", resultType);
+
 		Class<? extends Result> resultClazz = (Class<? extends Result>) Class.forName(resultClassName);
 
 		SparkConf conf = new SparkConf();
@@ -82,41 +92,71 @@ public class SparkEoscBulkTag implements Serializable {
 			isSparkSessionManaged,
 			spark -> {
 				removeOutputDir(spark, workingPath);
-				execBulkTag(spark, inputPath, workingPath, datasourceMapPath, resultClazz);
+				selectCompliantDatasources(spark, inputPath, workingPath, datasourceMapPath);
+				execBulkTag(spark, inputPath, workingPath, resultType, resultClazz);
 			});
 	}
 
+	private static void selectCompliantDatasources(SparkSession spark, String inputPath, String workingPath,
+		String datasourceMapPath) {
+		Dataset<Datasource> datasources = readPath(spark, inputPath + "datasource", Datasource.class)
+			.filter((FilterFunction<Datasource>) ds -> {
+				final String compatibility = ds.getOpenairecompatibility().getClassid();
+				return compatibility.equalsIgnoreCase(OPENAIRE_3) ||
+					compatibility.equalsIgnoreCase(OPENAIRE_4) ||
+					compatibility.equalsIgnoreCase(OPENAIRE_CRIS) ||
+					compatibility.equalsIgnoreCase(OPENAIRE_DATA);
+			});
+
+		Dataset<DatasourceMaster> datasourceMaster = readPath(spark, datasourceMapPath, DatasourceMaster.class);
+
+		datasources
+			.joinWith(datasourceMaster, datasources.col("id").equalTo(datasourceMaster.col("master")), "left")
+			.map(
+				(MapFunction<Tuple2<Datasource, DatasourceMaster>, DatasourceMaster>) t2 -> t2._2(),
+				Encoders.bean(DatasourceMaster.class))
+			.filter(Objects::nonNull)
+			.write()
+			.mode(SaveMode.Overwrite)
+			.option("compression", "gzip")
+			.json(workingPath + "datasource");
+	}
+
 	private static <R extends Result> void execBulkTag(
 		SparkSession spark,
 		String inputPath,
 		String workingPath,
-		String datasourceMapPath,
+		String resultType,
 		Class<R> resultClazz) {
 
-		List<String> hostedByList = readPath(spark, datasourceMapPath, DatasourceMaster.class)
+		List<String> hostedByList = readPath(spark, workingPath + "datasource", DatasourceMaster.class)
 			.map((MapFunction<DatasourceMaster, String>) dm -> dm.getMaster(), Encoders.STRING())
 			.collectAsList();
 
-		readPath(spark, inputPath, resultClazz)
-			.map(patchResult(), Encoders.bean(resultClazz))
-			.filter(Objects::nonNull)
+		readPath(spark, inputPath + resultType, resultClazz)
 			.map(
 				(MapFunction<R, R>) value -> enrich(value, hostedByList),
 				Encoders.bean(resultClazz))
 			.write()
 			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
-			.json(workingPath);
+			.json(workingPath + resultType);
 
-		readPath(spark, workingPath, resultClazz)
+		readPath(spark, workingPath + resultType, resultClazz)
 			.write()
 			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
-			.json(inputPath);
+			.json(inputPath + resultType);
 
 	}
 
 	private static <R extends Result> R enrich(R value, List<String> hostedByList) {
+		if (value.getDataInfo().getDeletedbyinference() == null) {
+			value.getDataInfo().setDeletedbyinference(false);
+		}
+		if (value.getContext() == null) {
+			value.setContext(new ArrayList<>());
+		}
 		if (value
 			.getInstance()
 			.stream()
@@ -66,7 +66,7 @@ public class PrepareDatasourceCountryAssociation {
 			conf,
 			isSparkSessionManaged,
 			spark -> {
-				removeOutputDir(spark, workingPath);
+				removeOutputDir(spark, workingPath + "/datasourceCountry");
 				prepareDatasourceCountryAssociation(
 					spark,
 					Arrays.asList(parser.get("whitelist").split(";")),
@@ -8,11 +8,13 @@ import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.Set;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.countrypropagation.pojo.CountrySbs;
 import eu.dnetlib.dhp.countrypropagation.pojo.DatasourceCountry;
 import eu.dnetlib.dhp.countrypropagation.pojo.ResultCountrySet;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.ForeachFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.api.java.function.MapGroupsFunction;
 import org.apache.spark.sql.*;
@@ -44,6 +46,7 @@ public class PrepareResultCountrySet {
 		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
 
 		String workingPath = parser.get("workingPath");
+		log.info("workingPath: {}", workingPath);
 
 		String inputPath = parser.get("sourcePath");
 		log.info("inputPath: {}", inputPath);
@@ -54,11 +57,6 @@ public class PrepareResultCountrySet {
 		final String resultType = resultClassName.substring(resultClassName.lastIndexOf(".") + 1).toLowerCase();
 		log.info("resultType: {}", resultType);
 
-		String outputPath = workingPath + "/" + resultType; // parser.get("outputPath");
-		log.info("outputPath: {}", outputPath);
-
-		final String datasourcecountrypath = workingPath + "/datasourceCountry";// parser.get("preparedInfoPath");
-		log.info("preparedInfoPath: {}", datasourcecountrypath);
 
 		Class<? extends Result> resultClazz = (Class<? extends Result>) Class.forName(resultClassName);
 
@@ -68,30 +66,31 @@ public class PrepareResultCountrySet {
 			conf,
 			isSparkSessionManaged,
 			spark -> {
-				removeOutputDir(spark, outputPath);
+				removeOutputDir(spark, workingPath + "/preparedInfo/" + resultType);
 				getPotentialResultToUpdate(
 					spark,
 					inputPath,
-					outputPath,
-					datasourcecountrypath,
-					workingPath + "/resultCfHb/" + resultType,
-					resultClazz);
+					workingPath,
+					resultType,
+					resultClazz);
 			});
 	}
 
 	private static <R extends Result> void getPotentialResultToUpdate(
 		SparkSession spark,
 		String inputPath,
-		String outputPath,
-		String datasourcecountrypath,
 		String workingPath,
+		String resultType,
 		Class<R> resultClazz) {
 
-		PropagationConstant.createCfHbforResult(spark, inputPath, workingPath, resultClazz);
+		final String datasourcecountrypath = workingPath + "/datasourceCountry";
+		final String cfhbpath = workingPath + "/resultCfHb/" + resultType;
+		final String outputPath = workingPath + "/preparedInfo/" + resultType;
 
+		PropagationConstant.createCfHbforResult(spark, inputPath, cfhbpath, resultClazz);
 
 		Dataset<DatasourceCountry> datasource_country = readPath(spark, datasourcecountrypath, DatasourceCountry.class);
-		Dataset<EntityEntityRel> cfhb = readPath(spark, workingPath, EntityEntityRel.class);
+		Dataset<EntityEntityRel> cfhb = readPath(spark, cfhbpath, EntityEntityRel.class);
 
 		datasource_country
 			.joinWith(
@@ -52,12 +52,14 @@ public class SparkCountryPropagationJob {
 		String workingPath = parser.get("workingPath");
 		log.info("workingPath: {}", workingPath);
 
-		final String outputPath = parser.get("outputPath");
-		log.info("outputPath: {}", outputPath);
-
 		final String resultClassName = parser.get("resultTableName");
 		log.info("resultTableName: {}", resultClassName);
 
+		final String resultType = resultClassName.substring(resultClassName.lastIndexOf(".") + 1).toLowerCase();
+		log.info("resultType: {}", resultType);
+
 		Class<? extends Result> resultClazz = (Class<? extends Result>) Class.forName(resultClassName);
 
 		SparkConf conf = new SparkConf();
@@ -65,12 +67,12 @@ public class SparkCountryPropagationJob {
 			conf,
 			isSparkSessionManaged,
 			spark -> {
-				removeOutputDir(spark, outputPath);
+				removeOutputDir(spark, workingPath + "/" + resultType);
 				execPropagation(
 					spark,
 					sourcePath,
 					workingPath,
-					outputPath,
+					resultType,
 					resultClazz);
 			});
 	}
@@ -78,18 +80,15 @@ public class SparkCountryPropagationJob {
 	private static <R extends Result> void execPropagation(
 		SparkSession spark,
 		String sourcePath,
-		String preparedInfoPath,
-		String outputPath,
+		String workingPath,
+		String resultType,
 		Class<R> resultClazz) {
 
 		log.info("Reading Graph table from: {}", sourcePath);
 		Dataset<R> res = readPath(spark, sourcePath, resultClazz);
 
-		log.info("Reading prepared info: {}", preparedInfoPath);
-		Dataset<ResultCountrySet> prepared = spark
-			.read()
-			.json(preparedInfoPath)
-			.as(Encoders.bean(ResultCountrySet.class));
+		log.info("Reading prepared info: {}", workingPath + "/preparedInfo/" + resultType);
+		Dataset<ResultCountrySet> prepared = readPath(spark, workingPath + "/preparedInfo/" + resultType, ResultCountrySet.class);
 
 		res
 			.joinWith(prepared, res.col("id").equalTo(prepared.col("resultId")), "left_outer")
@@ -97,9 +96,9 @@ public class SparkCountryPropagationJob {
 			.write()
 			.option("compression", "gzip")
 			.mode(SaveMode.Overwrite)
-			.json(outputPath);
+			.json(workingPath + "/" + resultType);
 
-		readPath(spark, outputPath, resultClazz)
+		readPath(spark, workingPath + "/" + resultType, resultClazz)
 			.write()
 			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
@@ -29,6 +29,13 @@
     "paramLongName": "isSparkSessionManaged",
     "paramDescription": "true if the spark session is managed, false otherwise",
     "paramRequired": false
+  },
+  {
+    "paramName": "rt",
+    "paramLongName": "resultType",
+    "paramDescription": "the result type",
+    "paramRequired": true
   }
 
 ]
@@ -11,17 +11,12 @@
     "paramDescription": "the name of the result table we are currently working on",
     "paramRequired": true
   },
-  {
-    "paramName": "out",
-    "paramLongName": "outputPath",
-    "paramDescription": "the path used to store temporary output files",
-    "paramRequired": true
-  },
   {
     "paramName": "wp",
     "paramLongName": "workingPath",
     "paramDescription": "the path where prepared info have been stored",
-    "paramRequired": false
+    "paramRequired": true
   },
   {
     "paramName": "ssm",
@@ -6,8 +6,8 @@
     "paramRequired": true
   },
   {
-    "paramName": "out",
-    "paramLongName": "outputPath",
+    "paramName": "wp",
+    "paramLongName": "workingPath",
     "paramDescription": "the path used to store temporary output files",
     "paramRequired": true
   },
@@ -293,10 +293,6 @@
                     <name>sourcePath</name>
                     <value>${outputPath}</value>
                 </property>
-                <property>
-                    <name>outputPath</name>
-                    <value>${workingDir}/country/result</value>
-                </property>
                 <property>
                     <name>whitelist</name>
                     <value>${datasourceWhitelistForCountryPropagation}</value>
@@ -12,11 +12,6 @@
             <name>allowedtypes</name>
             <description>the allowed types</description>
         </property>
-        <property>
-            <name>outputPath</name>
-            <description>the output path</description>
-        </property>
-
     </parameters>
 
     <global>
@@ -60,18 +55,18 @@
                 <arg>--allowedtypes</arg><arg>${allowedtypes}</arg>
                 <arg>--workingPath</arg><arg>${workingDir}/country</arg>
             </spark>
-            <ok to="fork_join_prepare_result_country"/>
+            <ok to="fork_prepare_result_country"/>
            <error to="Kill"/>
        </action>
 
-    <fork name="fork_join_prepare_result_country">
-        <path start="join_prepareresult_publication"/>
-        <path start="join_prepareresult_dataset"/>
-        <path start="join_prepareresult_otherresearchproduct"/>
-        <path start="join_prepareresult_software"/>
+    <fork name="fork_prepare_result_country">
+        <path start="prepareresult_publication"/>
+        <path start="prepareresult_dataset"/>
+        <path start="prepareresult_otherresearchproduct"/>
+        <path start="prepareresult_software"/>
     </fork>
 
-    <action name="join_prepareresult_publication">
+    <action name="prepareresult_publication">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
             <mode>cluster</mode>
@@ -99,7 +94,7 @@
             <error to="Kill"/>
         </action>
 
-    <action name="join_prepareresult_dataset">
+    <action name="prepareresult_dataset">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
             <mode>cluster</mode>
@@ -127,7 +122,7 @@
             <error to="Kill"/>
         </action>
 
-    <action name="join_prepareresult_otherresearchproduct">
+    <action name="prepareresult_otherresearchproduct">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
             <mode>cluster</mode>
@@ -155,7 +150,7 @@
             <error to="Kill"/>
         </action>
 
-    <action name="join_prepareresult_software">
+    <action name="prepareresult_software">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
             <mode>cluster</mode>
@@ -185,14 +180,14 @@
 
     <join name="wait_prepare" to="fork_join_apply_country_propagation"/>
 
-    <fork name="fork_join_apply_country_propagation">
-        <path start="join_propagation_publication"/>
-        <path start="join_propagation_dataset"/>
-        <path start="join_propagation_otherresearchproduct"/>
-        <path start="join_propagation_software"/>
+    <fork name="fork_apply_country_propagation">
+        <path start="propagation_publication"/>
+        <path start="propagation_dataset"/>
+        <path start="propagation_otherresearchproduct"/>
+        <path start="propagation_software"/>
     </fork>
 
-    <action name="join_propagation_publication">
+    <action name="propagation_publication">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
             <mode>cluster</mode>
@@ -213,15 +208,15 @@
                     --conf spark.sql.shuffle.partitions=3840
                 </spark-opts>
                 <arg>--sourcePath</arg><arg>${sourcePath}/publication</arg>
-                <arg>--workingPath</arg><arg>${workingDir}/country/publication</arg>
+                <arg>--workingPath</arg><arg>${workingDir}/country</arg>
                 <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
-                <arg>--outputPath</arg><arg>${workingDir}/country/result/publication</arg>
             </spark>
             <ok to="wait"/>
             <error to="Kill"/>
         </action>
 
-    <action name="join_propagation_dataset">
+    <action name="propagation_dataset">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
             <mode>cluster</mode>
@@ -242,15 +237,15 @@
                     --conf spark.sql.shuffle.partitions=3840
                 </spark-opts>
                 <arg>--sourcePath</arg><arg>${sourcePath}/dataset</arg>
-                <arg>--workingPath</arg><arg>${workingDir}/country/dataset</arg>
+                <arg>--workingPath</arg><arg>${workingDir}/country</arg>
                 <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
-                <arg>--outputPath</arg><arg>${workingDir}/country/result/dataset</arg>
             </spark>
             <ok to="wait"/>
             <error to="Kill"/>
         </action>
 
-    <action name="join_propagation_otherresearchproduct">
+    <action name="propagation_otherresearchproduct">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
             <mode>cluster</mode>
@@ -271,15 +266,15 @@
                     --conf spark.sql.shuffle.partitions=3840
                 </spark-opts>
                 <arg>--sourcePath</arg><arg>${sourcePath}/otherresearchproduct</arg>
-                <arg>--workingPath</arg><arg>${workingDir}/country/otherresearchproduct</arg>
+                <arg>--workingPath</arg><arg>${workingDir}/country</arg>
                 <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>
-                <arg>--outputPath</arg><arg>${workingDir}/country/result/otherresearchproduct</arg>
             </spark>
             <ok to="wait"/>
             <error to="Kill"/>
         </action>
 
-    <action name="join_propagation_software">
+    <action name="propagation_software">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
             <mode>cluster</mode>
@@ -300,10 +295,9 @@
                     --conf spark.sql.shuffle.partitions=3840
                 </spark-opts>
                 <arg>--sourcePath</arg><arg>${sourcePath}/software</arg>
-                <arg>--workingPath</arg><arg>${workingDir}/country/software</arg>
+                <arg>--workingPath</arg><arg>${workingDir}/country</arg>
                 <arg>--resultTableName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
-                <arg>--outputPath</arg><arg>${workingDir}/country/result/software</arg>
             </spark>
             <ok to="wait"/>
             <error to="Kill"/>
         </action>
@@ -27,16 +27,11 @@ import org.slf4j.LoggerFactory;
 */
 import com.fasterxml.jackson.databind.ObjectMapper;
 
+import eu.dnetlib.dhp.bulktag.eosc.DatasourceMaster;
 import eu.dnetlib.dhp.bulktag.eosc.SparkEoscBulkTag;
-import eu.dnetlib.dhp.schema.oaf.Dataset;
-import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
-import eu.dnetlib.dhp.schema.oaf.Software;
-import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
+import eu.dnetlib.dhp.schema.oaf.*;
 
-//"50|475c1990cbb2::0fecfb874d9395aa69d2f4d7cd1acbea" has instance hostedby eosc
-//"50|475c1990cbb2::3185cd5d8a2b0a06bb9b23ef11748eb1" has instance hostedby eosc
-//"50|475c1990cbb2::449f28eefccf9f70c04ad70d61e041c7" has two instance one hostedby eosc
-//"50|475c1990cbb2::3894c94123e96df8a21249957cf160cb" has EoscTag
-
 public class EOSCContextTaggingTest {
 	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@@ -78,6 +73,22 @@ public class EOSCContextTaggingTest {
 	@Test
 	void EoscContextTagTest() throws Exception {
 
+		//"50|475c1990cbb2::0fecfb874d9395aa69d2f4d7cd1acbea" has instance hostedby eosc (cris)
+		//"50|475c1990cbb2::3185cd5d8a2b0a06bb9b23ef11748eb1" has instance hostedby eosc (zenodo)
+		//"50|475c1990cbb2::449f28eefccf9f70c04ad70d61e041c7" has two instance one hostedby eosc (wrong compatibility)
+		//"50|475c1990cbb2::3894c94123e96df8a21249957cf160cb" has EoscTag
+
+		spark
+			.read()
+			.textFile(getClass().getResource("/eu/dnetlib/dhp/bulktag/eosc/datasource/datasource_1").getPath())
+			.map(
+				(MapFunction<String, Datasource>) value -> OBJECT_MAPPER.readValue(value, Datasource.class),
+				Encoders.bean(Datasource.class))
+			.write()
+			.mode(SaveMode.Overwrite)
+			.option("compression", "gzip")
+			.json(workingDir.toString() + "/input/datasource");
+
 		spark
 			.read()
 			.textFile(getClass().getResource("/eu/dnetlib/dhp/bulktag/eosc/dataset/dataset_10.json").getPath())
@@ -94,17 +105,24 @@ public class EOSCContextTaggingTest {
 				new String[] {
 					"-isSparkSessionManaged", Boolean.FALSE.toString(),
 					"-sourcePath",
-					workingDir.toString() + "/input/dataset",
-					"-workingPath", workingDir.toString() + "/working/dataset",
+					workingDir.toString() + "/input/",
+					"-workingPath", workingDir.toString() + "/working/",
 					"-datasourceMapPath",
 					getClass()
 						.getResource("/eu/dnetlib/dhp/bulktag/eosc/datasourceMasterAssociation/datasourceMaster")
 						.getPath(),
-					"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Dataset"
+					"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Dataset",
+					"-resultType", "dataset"
 				});
 
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
+		Assertions
+			.assertEquals(
+				2, sc
+					.textFile(workingDir.toString() + "/working/datasource")
+					.map(item -> OBJECT_MAPPER.readValue(item, DatasourceMaster.class))
+					.count());
+
 		JavaRDD<Dataset> tmp = sc
 			.textFile(workingDir.toString() + "/input/dataset")
 			.map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));
@@ -113,7 +131,7 @@ public class EOSCContextTaggingTest {
 
 		Assertions
 			.assertEquals(
-				4,
+				2,
 				tmp
 					.filter(
 						s -> s.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
@@ -140,17 +158,17 @@ public class EOSCContextTaggingTest {
 
 		Assertions
 			.assertEquals(
-				1,
+				0,
 				tmp
 					.filter(
-						d -> d.getId().equals("50|475c1990cbb2::3894c94123e96df8a21249957cf160cb")
+						d -> d.getId().equals("50|475c1990cbb2::449f28eefccf9f70c04ad70d61e041c7")
 							&&
 							d.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
 					.count());
 
 		Assertions
 			.assertEquals(
-				1,
+				0,
 				tmp
 					.filter(
 						d -> d.getId().equals("50|475c1990cbb2::3894c94123e96df8a21249957cf160cb")
@@ -159,4 +177,62 @@ public class EOSCContextTaggingTest {
 					.count());
 	}
 
+	@Test
+	void EoscContextTagTestEmptyDatasource() throws Exception {
+
+		spark
+			.read()
+			.textFile(getClass().getResource("/eu/dnetlib/dhp/bulktag/eosc/dataset/dataset_10.json").getPath())
+			.map(
+				(MapFunction<String, Dataset>) value -> OBJECT_MAPPER.readValue(value, Dataset.class),
+				Encoders.bean(Dataset.class))
+			.write()
+			.mode(SaveMode.Overwrite)
+			.option("compression", "gzip")
+			.json(workingDir.toString() + "/input/dataset");
+
+		spark
+			.read()
+			.textFile(getClass().getResource("/eu/dnetlib/dhp/bulktag/eosc/datasource/datasource").getPath())
+			.map(
+				(MapFunction<String, Datasource>) value -> OBJECT_MAPPER.readValue(value, Datasource.class),
+				Encoders.bean(Datasource.class))
+			.write()
+			.mode(SaveMode.Overwrite)
+			.option("compression", "gzip")
+			.json(workingDir.toString() + "/input/datasource");
+
+		SparkEoscBulkTag
+			.main(
+				new String[] {
+					"-isSparkSessionManaged", Boolean.FALSE.toString(),
+					"-sourcePath",
+					workingDir.toString() + "/input/",
+					"-workingPath", workingDir.toString() + "/working/",
+					"-datasourceMapPath",
+					getClass()
+						.getResource("/eu/dnetlib/dhp/bulktag/eosc/datasourceMasterAssociation/datasourceMaster")
+						.getPath(),
+					"-resultTableName", "eu.dnetlib.dhp.schema.oaf.Dataset",
+					"-resultType", "dataset"
+				});
+
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+
+		JavaRDD<Dataset> tmp = sc
+			.textFile(workingDir.toString() + "/input/dataset")
+			.map(item -> OBJECT_MAPPER.readValue(item, Dataset.class));
+
+		Assertions.assertEquals(10, tmp.count());
+
+		Assertions
+			.assertEquals(
+				0,
+				tmp
					.filter(
+						s -> s.getContext().stream().anyMatch(c -> c.getId().equals("eosc")))
+					.count());
+
+	}
+
 }
@@ -0,0 +1,634 @@
+package eu.dnetlib.dhp.countrypropagation;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import eu.dnetlib.dhp.countrypropagation.pojo.DatasourceCountry;
+import eu.dnetlib.dhp.countrypropagation.pojo.ResultCountrySet;
+import eu.dnetlib.dhp.schema.oaf.Country;
+import eu.dnetlib.dhp.schema.oaf.Publication;
+import eu.dnetlib.dhp.schema.oaf.Qualifier;
+import eu.dnetlib.dhp.schema.oaf.Software;
+import org.apache.commons.io.FileUtils;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.FlatMapFunction;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import scala.Tuple2;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * @author miriam.baglioni
+ * @Date 23/11/22
+ */
+public class CountryPropagationAllStepsTest {
+
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+	private static SparkSession spark;
+
+	private static Path workingDir;
+
+	@BeforeAll
+	public static void beforeAll() throws IOException {
+		workingDir = Files.createTempDirectory(DatasourceCountryPreparationTest.class.getSimpleName());
+
+		SparkConf conf = new SparkConf();
+		conf.setAppName(DatasourceCountryPreparationTest.class.getSimpleName());
+
+		conf.setMaster("local[*]");
+		conf.set("spark.driver.host", "localhost");
+		conf.set("hive.metastore.local", "true");
+		conf.set("spark.ui.enabled", "false");
+		conf.set("spark.sql.warehouse.dir", workingDir.toString());
+		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
+
+		spark = SparkSession
+			.builder()
+			.appName(DatasourceCountryPreparationTest.class.getSimpleName())
+			.config(conf)
+			.getOrCreate();
+	}
+
+	@AfterAll
+	public static void afterAll() throws IOException {
+		FileUtils.deleteDirectory(workingDir.toFile());
+		spark.stop();
+	}
+
+	@Test
+	public void allStepsTest() throws Exception {
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+
+		String sourcePath = getClass()
+			.getResource("/eu/dnetlib/dhp/countrypropagation/graph")
+			.getPath();
+
+		PrepareDatasourceCountryAssociation
+			.main(
+				new String[] {
+					"--isSparkSessionManaged", Boolean.FALSE.toString(),
+					"--sourcePath", sourcePath,
+					"--workingPath", workingDir.toString() + "/country",
+					"--allowedtypes", "pubsrepository::institutional",
+					"--whitelist",
+					"10|openaire____::3795d6478e30e2c9f787d427ff160944;10|opendoar____::16e6a3326dd7d868cbc926602a61e4d0;10|eurocrisdris::fe4903425d9040f680d8610d9079ea14;10|openaire____::5b76240cc27a58c6f7ceef7d8c36660e;10|openaire____::172bbccecf8fca44ab6a6653e84cb92a;10|openaire____::149c6590f8a06b46314eed77bfca693f;10|eurocrisdris::a6026877c1a174d60f81fd71f62df1c1;10|openaire____::4692342f0992d91f9e705c26959f09e0;10|openaire____::8d529dbb05ec0284662b391789e8ae2a;10|openaire____::345c9d171ef3c5d706d08041d506428c;10|opendoar____::1c1d4df596d01da60385f0bb17a4a9e0;10|opendoar____::7a614fd06c325499f1680b9896beedeb;10|opendoar____::1ee3dfcd8a0645a25a35977997223d22;10|opendoar____::d296c101daa88a51f6ca8cfc1ac79b50;10|opendoar____::798ed7d4ee7138d49b8828958048130a;10|openaire____::c9d2209ecc4d45ba7b4ca7597acb88a2;10|eurocrisdris::c49e0fe4b9ba7b7fab717d1f0f0a674d;10|eurocrisdris::9ae43d14471c4b33661fedda6f06b539;10|eurocrisdris::432ca599953ff50cd4eeffe22faf3e48"
+				});
+
+		sc.textFile(
+			getClass()
+				.getResource("/eu/dnetlib/dhp/countrypropagation/graph/publication")
+				.getPath()).saveAsTextFile(workingDir.toString() + "/source/publication");
+
+		sc
+			.textFile(
+				getClass()
+					.getResource("/eu/dnetlib/dhp/countrypropagation/graph/software")
+					.getPath()).saveAsTextFile(workingDir.toString() + "/source/software");
+
+		verifyDatasourceCountry();
+
+		PrepareResultCountrySet
+			.main(
+				new String[] {
+					"--isSparkSessionManaged", Boolean.FALSE.toString(),
+					"--workingPath", workingDir.toString() + "/country",
+					"--sourcePath", workingDir.toString() + "/source/publication",
+					"--resultTableName", Publication.class.getCanonicalName()
+				});
+
+		verifyResultCountrySet();
+
+		PrepareResultCountrySet
+			.main(
+				new String[] {
+					"--isSparkSessionManaged", Boolean.FALSE.toString(),
+					"--workingPath", workingDir.toString() + "/country",
+					"--sourcePath", workingDir.toString() + "/source/software",
+					"--resultTableName", Software.class.getCanonicalName()
+				});
+
+		SparkCountryPropagationJob
+			.main(
+				new String[] {
+					"--isSparkSessionManaged", Boolean.FALSE.toString(),
+					"--sourcePath", workingDir.toString() + "/source/publication",
+					"-resultTableName", Publication.class.getCanonicalName(),
+					"-workingPath", workingDir.toString() + "/country"
+				});
+
+		verifyPropagationPublication();
+
+		SparkCountryPropagationJob
+			.main(
+				new String[] {
+					"--isSparkSessionManaged", Boolean.FALSE.toString(),
+					"--sourcePath", workingDir.toString() + "/source/software",
+					"-resultTableName", Software.class.getCanonicalName(),
+					"-workingPath", workingDir.toString() + "/country"
+				});
+
+		verifyPropagationSoftware();
+
+	}
+
+	void verifyDatasourceCountry(){
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+
+		JavaRDD<DatasourceCountry> tmp = sc
+			.textFile(workingDir.toString() + "/country/datasourceCountry")
+			.map(item -> OBJECT_MAPPER.readValue(item, DatasourceCountry.class));
+
+		Assertions.assertEquals(3, tmp.count());
+		Assertions
+			.assertEquals(
+				1, tmp
+					.filter(
+						dsc -> dsc
+							.getDataSourceId()
+							.equals("10|eurocrisdris::fe4903425d9040f680d8610d9079ea14"))
+					.count());
+		Assertions
+			.assertEquals(
+				1, tmp
+					.filter(
+						dsc -> dsc
+							.getDataSourceId()
+							.equals("10|opendoar____::f0dd4a99fba6075a9494772b58f95280"))
+					.count());
+		Assertions
+			.assertEquals(
+				1, tmp
+					.filter(
+						dsc -> dsc
+							.getDataSourceId()
+							.equals("10|eurocrisdris::9ae43d14471c4b33661fedda6f06b539"))
+					.count());
+
+		Assertions
+			.assertEquals(
+				"NL", tmp
+					.filter(
+						dsc -> dsc
+							.getDataSourceId()
+							.equals("10|eurocrisdris::fe4903425d9040f680d8610d9079ea14"))
+					.collect()
+					.get(0)
+					.getCountry()
+					.getClassid());
+		Assertions
+			.assertEquals(
+				"Netherlands", tmp
+					.filter(
+						dsc -> dsc
+							.getDataSourceId()
+							.equals("10|eurocrisdris::fe4903425d9040f680d8610d9079ea14"))
+					.collect()
+					.get(0)
+					.getCountry()
+					.getClassname());
+
+		Assertions
+			.assertEquals(
+				"IT", tmp
+					.filter(
+						dsc -> dsc
+							.getDataSourceId()
+							.equals("10|opendoar____::f0dd4a99fba6075a9494772b58f95280"))
+					.collect()
+					.get(0)
+					.getCountry()
+					.getClassid());
+		Assertions
+			.assertEquals(
+				"Italy", tmp
+					.filter(
+						dsc -> dsc
+							.getDataSourceId()
+							.equals("10|opendoar____::f0dd4a99fba6075a9494772b58f95280"))
+					.collect()
+					.get(0)
+					.getCountry()
+					.getClassname());
+
+		Assertions
+			.assertEquals(
+				"FR", tmp
+					.filter(
+						dsc -> dsc
+							.getDataSourceId()
+							.equals("10|eurocrisdris::9ae43d14471c4b33661fedda6f06b539"))
+					.collect()
+					.get(0)
+					.getCountry()
+					.getClassid());
+		Assertions
+			.assertEquals(
+				"France", tmp
+					.filter(
+						dsc -> dsc
+							.getDataSourceId()
+							.equals("10|eurocrisdris::9ae43d14471c4b33661fedda6f06b539"))
+					.collect()
+					.get(0)
+					.getCountry()
+					.getClassname());
+
+		tmp.foreach(e -> System.out.println(OBJECT_MAPPER.writeValueAsString(e)));
+	}
+
+	void verifyResultCountrySet(){
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+
+		JavaRDD<ResultCountrySet> tmp = sc
+			.textFile(workingDir.toString() + "/country/preparedInfo/publication")
+			.map(item -> OBJECT_MAPPER.readValue(item, ResultCountrySet.class));
+
+		Assertions.assertEquals(5, tmp.count());
+
+		ResultCountrySet rc = tmp
+			.filter(r -> r.getResultId().equals("50|06cdd3ff4700::49ec404cee4e1452808aabeaffbd3072"))
+			.collect()
+			.get(0);
+		Assertions.assertEquals(1, rc.getCountrySet().size());
+		Assertions.assertEquals("NL", rc.getCountrySet().get(0).getClassid());
+		Assertions.assertEquals("Netherlands", rc.getCountrySet().get(0).getClassname());
+
+		rc = tmp
+			.filter(r -> r.getResultId().equals("50|07b5c0ccd4fe::e7f5459cc97865f2af6e3da964c1250b"))
+			.collect()
+			.get(0);
+		Assertions.assertEquals(1, rc.getCountrySet().size());
+		Assertions.assertEquals("NL", rc.getCountrySet().get(0).getClassid());
+		Assertions.assertEquals("Netherlands", rc.getCountrySet().get(0).getClassname());
+
+		rc = tmp
+			.filter(r -> r.getResultId().equals("50|355e65625b88::e7d48a470b13bda61f7ebe3513e20cb6"))
+			.collect()
+			.get(0);
+		Assertions.assertEquals(2, rc.getCountrySet().size());
+		Assertions
+			.assertTrue(
+				rc
+					.getCountrySet()
+					.stream()
+					.anyMatch(cs -> cs.getClassid().equals("IT") && cs.getClassname().equals("Italy")));
+		Assertions
+			.assertTrue(
+				rc
+					.getCountrySet()
+					.stream()
+					.anyMatch(cs -> cs.getClassid().equals("FR") && cs.getClassname().equals("France")));
+
+		rc = tmp
+			.filter(r -> r.getResultId().equals("50|355e65625b88::74009c567c81b4aa55c813db658734df"))
+			.collect()
+			.get(0);
+		Assertions.assertEquals(2, rc.getCountrySet().size());
+		Assertions
+			.assertTrue(
+				rc
+					.getCountrySet()
+					.stream()
+					.anyMatch(cs -> cs.getClassid().equals("IT") && cs.getClassname().equals("Italy")));
+		Assertions
+			.assertTrue(
+				rc
+					.getCountrySet()
+					.stream()
+					.anyMatch(cs -> cs.getClassid().equals("NL") && cs.getClassname().equals("Netherlands")));
+
+		rc = tmp
+			.filter(r -> r.getResultId().equals("50|355e65625b88::54a1c76f520bb2c8da27d12e42891088"))
+			.collect()
+			.get(0);
+		Assertions.assertEquals(2, rc.getCountrySet().size());
+		Assertions
+			.assertTrue(
+				rc
+					.getCountrySet()
+					.stream()
+					.anyMatch(cs -> cs.getClassid().equals("IT") && cs.getClassname().equals("Italy")));
+		Assertions
+			.assertTrue(
+				rc
+					.getCountrySet()
+					.stream()
+					.anyMatch(cs -> cs.getClassid().equals("FR") && cs.getClassname().equals("France")));
+	}
+
+	void verifyPropagationPublication(){
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+
+		JavaRDD<Publication> tmp = sc
+			.textFile(workingDir.toString() + "/country/publication")
+			.map(item -> OBJECT_MAPPER.readValue(item, Publication.class));
+
+		Assertions.assertEquals(12, tmp.count());
+
+		Assertions.assertEquals(5, tmp.filter(r -> r.getCountry().size() > 0).count());
+
+		tmp
+			.foreach(
+				r -> r.getCountry().stream().forEach(c -> Assertions.assertEquals("dnet:countries", c.getSchemeid())));
+		tmp
+			.foreach(
+				r -> r
+					.getCountry()
+					.stream()
+					.forEach(c -> Assertions.assertEquals("dnet:countries", c.getSchemename())));
+		tmp
+			.foreach(
+				r -> r
+					.getCountry()
+					.stream()
+					.forEach(c -> Assertions.assertFalse(c.getDataInfo().getDeletedbyinference())));
+		tmp.foreach(r -> r.getCountry().stream().forEach(c -> Assertions.assertFalse(c.getDataInfo().getInvisible())));
+		tmp.foreach(r -> r.getCountry().stream().forEach(c -> Assertions.assertTrue(c.getDataInfo().getInferred())));
+		tmp
+			.foreach(
+				r -> r.getCountry().stream().forEach(c -> Assertions.assertEquals("0.85", c.getDataInfo().getTrust())));
+		tmp
+			.foreach(
+				r -> r
+					.getCountry()
+					.stream()
+					.forEach(c -> Assertions.assertEquals("propagation", c.getDataInfo().getInferenceprovenance())));
+		tmp
+			.foreach(
+				r -> r
+					.getCountry()
+					.stream()
+					.forEach(
+						c -> Assertions
+							.assertEquals("country:instrepos", c.getDataInfo().getProvenanceaction().getClassid())));
+		tmp
+			.foreach(
+				r -> r
+					.getCountry()
+					.stream()
+					.forEach(
+						c -> Assertions
+							.assertEquals(
+								"dnet:provenanceActions", c.getDataInfo().getProvenanceaction().getSchemeid())));
+		tmp
+			.foreach(
+				r -> r
+					.getCountry()
+					.stream()
+					.forEach(
+						c -> Assertions
+							.assertEquals(
+								"dnet:provenanceActions", c.getDataInfo().getProvenanceaction().getSchemename())));
+
+		List<Country> countries = tmp
+			.filter(r -> r.getId().equals("50|06cdd3ff4700::49ec404cee4e1452808aabeaffbd3072"))
+			.collect()
+			.get(0)
+			.getCountry();
+		Assertions.assertEquals(1, countries.size());
+		Assertions.assertEquals("NL", countries.get(0).getClassid());
+		Assertions.assertEquals("Netherlands", countries.get(0).getClassname());
+
+		countries = tmp
+			.filter(r -> r.getId().equals("50|07b5c0ccd4fe::e7f5459cc97865f2af6e3da964c1250b"))
+			.collect()
+			.get(0)
+			.getCountry();
+		Assertions.assertEquals(1, countries.size());
+		Assertions.assertEquals("NL", countries.get(0).getClassid());
+		Assertions.assertEquals("Netherlands", countries.get(0).getClassname());
+
+		countries = tmp
+			.filter(r -> r.getId().equals("50|355e65625b88::e7d48a470b13bda61f7ebe3513e20cb6"))
+			.collect()
+			.get(0)
+			.getCountry();
+		Assertions.assertEquals(2, countries.size());
+		Assertions
+			.assertTrue(
+				countries.stream().anyMatch(cs -> cs.getClassid().equals("IT") && cs.getClassname().equals("Italy")));
+		Assertions
+			.assertTrue(
+				countries.stream().anyMatch(cs -> cs.getClassid().equals("FR") && cs.getClassname().equals("France")));
+
+		countries = tmp
+			.filter(r -> r.getId().equals("50|355e65625b88::74009c567c81b4aa55c813db658734df"))
+			.collect()
+			.get(0)
+			.getCountry();
+		Assertions.assertEquals(2, countries.size());
+		Assertions
+			.assertTrue(
+				countries.stream().anyMatch(cs -> cs.getClassid().equals("IT") && cs.getClassname().equals("Italy")));
+		Assertions
+			.assertTrue(
+				countries
+					.stream()
+					.anyMatch(cs -> cs.getClassid().equals("NL") && cs.getClassname().equals("Netherlands")));
+
+		countries = tmp
+			.filter(r -> r.getId().equals("50|355e65625b88::54a1c76f520bb2c8da27d12e42891088"))
+			.collect()
+			.get(0)
+			.getCountry();
+		Assertions.assertEquals(2, countries.size());
+		Assertions
+			.assertTrue(
+				countries.stream().anyMatch(cs -> cs.getClassid().equals("IT") && cs.getClassname().equals("Italy")));
+		Assertions
+			.assertTrue(
+				countries.stream().anyMatch(cs -> cs.getClassid().equals("FR") && cs.getClassname().equals("France")));
+	}
+
+	void verifyPropagationSoftware(){
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+		JavaRDD<Software> tmp = sc
+			.textFile(workingDir.toString() + "/source/software")
+			.map(item -> OBJECT_MAPPER.readValue(item, Software.class));
+
+		Assertions.assertEquals(10, tmp.count());
+
+		Dataset<Software> verificationDs = spark.createDataset(tmp.rdd(), Encoders.bean(Software.class));
+
+		Assertions.assertEquals(6, verificationDs.filter("size(country) > 0").count());
+		Assertions.assertEquals(3, verificationDs.filter("size(country) = 1").count());
+		Assertions.assertEquals(3, verificationDs.filter("size(country) = 2").count());
+		Assertions.assertEquals(0, verificationDs.filter("size(country) > 2").count());
+
+		Dataset<String> countryExploded = verificationDs
+			.flatMap(
+				(FlatMapFunction<Software, Country>) row -> row.getCountry().iterator(), Encoders.bean(Country.class))
+			.map((MapFunction<Country, String>) Qualifier::getClassid, Encoders.STRING());
+
+		Assertions.assertEquals(9, countryExploded.count());
+
+		Assertions.assertEquals(1, countryExploded.filter("value = 'FR'").count());
+		Assertions.assertEquals(1, countryExploded.filter("value = 'TR'").count());
+		Assertions.assertEquals(2, countryExploded.filter("value = 'IT'").count());
+		Assertions.assertEquals(1, countryExploded.filter("value = 'US'").count());
+		Assertions.assertEquals(1, countryExploded.filter("value = 'MX'").count());
+		Assertions.assertEquals(1, countryExploded.filter("value = 'CH'").count());
+		Assertions.assertEquals(2, countryExploded.filter("value = 'JP'").count());
+
+		Dataset<Tuple2<String, String>> countryExplodedWithCountryclassid = verificationDs
+			.flatMap((FlatMapFunction<Software, Tuple2<String, String>>) row -> {
+				List<Tuple2<String, String>> prova = new ArrayList<>();
+				List<Country> countryList = row.getCountry();
+				countryList
+					.forEach(
+						c -> prova
+							.add(
+								new Tuple2<>(
+									row.getId(), c.getClassid())));
+				return prova.iterator();
+			}, Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
+
+		Assertions.assertEquals(9, countryExplodedWithCountryclassid.count());
+
+		//countryExplodedWithCountryclassid.show(false);
+		Assertions
+			.assertEquals(
+				1,
+				countryExplodedWithCountryclassid
+					.filter(
+						"_1 = '50|od______1582::6e7a9b21a2feef45673890432af34244' and _2 = 'FR' ")
+					.count());
+		Assertions
+			.assertEquals(
+				1,
+				countryExplodedWithCountryclassid
+					.filter(
+						"_1 = '50|dedup_wf_001::40ea2f24181f6ae77b866ebcbffba523' and _2 = 'TR' ")
+					.count());
+		Assertions
+			.assertEquals(
+				2,
+				countryExplodedWithCountryclassid
+					.filter(
+						"_1 = '50|od______1106::2b7ca9726230be8e862be224fd463ac4' and (_2 = 'IT' or _2 = 'MX') ")
+					.count());
+		Assertions
+			.assertEquals(
+				2,
+				countryExplodedWithCountryclassid
+					.filter(
+						"_1 = '50|od_______935::46a0ad9964171c3dd13373f5427b9a1c' and (_2 = 'IT' or _2 = 'US') ")
+					.count());
+		Assertions
+			.assertEquals(
+				1,
+				countryExplodedWithCountryclassid
+					.filter(
+						"_1 = '50|dedup_wf_001::b67bc915603fc01e445f2b5888ba7218' and _2 = 'JP'")
+					.count());
+		Assertions
+			.assertEquals(
+				2,
+				countryExplodedWithCountryclassid
+					.filter(
+						"_1 = '50|od_______109::f375befa62a741e9250e55bcfa88f9a6' and (_2 = 'CH' or _2 = 'JP') ")
+					.count());
+
+		Dataset<Tuple2<String, String>> countryExplodedWithCountryclassname = verificationDs
+			.flatMap(
+				(FlatMapFunction<Software, Tuple2<String, String>>) row -> {
+					List<Tuple2<String, String>> prova = new ArrayList<>();
+					List<Country> countryList = row.getCountry();
+					countryList
+						.forEach(
+							c -> prova
+								.add(
+									new Tuple2<>(
+										row.getId(),
+										c.getClassname())));
+					return prova.iterator();
+				},
+				Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
+
+		//countryExplodedWithCountryclassname.show(false);
+		Assertions
+			.assertEquals(
+				1,
+				countryExplodedWithCountryclassname
+					.filter(
+						"_1 = '50|od______1582::6e7a9b21a2feef45673890432af34244' and _2 = 'France' ")
+					.count());
+		Assertions
+			.assertEquals(
+				1,
+				countryExplodedWithCountryclassname
+					.filter(
+						"_1 = '50|dedup_wf_001::40ea2f24181f6ae77b866ebcbffba523' and _2 = 'Turkey' ")
+					.count());
+		Assertions
+			.assertEquals(
+				2,
+				countryExplodedWithCountryclassname
+					.filter(
+						"_1 = '50|od______1106::2b7ca9726230be8e862be224fd463ac4' and (_2 = 'Italy' or _2 = 'Mexico') ")
+					.count());
+		Assertions
+			.assertEquals(
+				2,
+				countryExplodedWithCountryclassname
+					.filter(
+						"_1 = '50|od_______935::46a0ad9964171c3dd13373f5427b9a1c' and (_2 = 'Italy' or _2 = 'United States') ")
+					.count());
+		Assertions
+			.assertEquals(
+				1,
+				countryExplodedWithCountryclassname
+					.filter(
+						"_1 = '50|dedup_wf_001::b67bc915603fc01e445f2b5888ba7218' and _2 = 'Japan' ")
+					.count());
+		Assertions
+			.assertEquals(
+				2,
+				countryExplodedWithCountryclassname
+					.filter(
+						"_1 = '50|od_______109::f375befa62a741e9250e55bcfa88f9a6' and (_2 = 'Switzerland' or _2 = 'Japan') ")
+					.count());
+
+		Dataset<Tuple2<String, String>> countryExplodedWithCountryProvenance = verificationDs
+			.flatMap(
+				(FlatMapFunction<Software, Tuple2<String, String>>) row -> {
|
||||||
|
List<Tuple2<String, String>> prova = new ArrayList<>();
|
||||||
|
List<Country> countryList = row.getCountry();
|
||||||
|
countryList
|
||||||
|
.forEach(
|
||||||
|
c -> prova
|
||||||
|
.add(
|
||||||
|
new Tuple2<>(
|
||||||
|
row.getId(),
|
||||||
|
c
|
||||||
|
.getDataInfo()
|
||||||
|
.getInferenceprovenance())));
|
||||||
|
return prova.iterator();
|
||||||
|
},
|
||||||
|
Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
|
||||||
|
|
||||||
|
Assertions
|
||||||
|
.assertEquals(
|
||||||
|
7, countryExplodedWithCountryProvenance.filter("_2 = 'propagation'").count());
|
||||||
|
}
|
||||||
|
}
|
|
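The final assertion above counts country entries whose dataInfo.inferenceprovenance equals "propagation". As a minimal sketch, assuming the eu.dnetlib.dhp.schema.oaf setters matching the getters used by these tests, a propagated country entry would be shaped roughly like this (the factory method is illustrative, not the job's actual code):

	import eu.dnetlib.dhp.schema.oaf.Country;
	import eu.dnetlib.dhp.schema.oaf.DataInfo;

	class PropagatedCountryExample {
		// Sketch: a country added by propagation, shaped so that the test's
		// filter("_2 = 'propagation'") would match it.
		static Country propagatedCountry(String classid, String classname) {
			Country country = new Country();
			country.setClassid(classid); // e.g. "IT"
			country.setClassname(classname); // e.g. "Italy"
			country.setSchemeid("dnet:countries"); // assumed vocabulary id
			country.setSchemename("dnet:countries");
			DataInfo dataInfo = new DataInfo();
			dataInfo.setInferred(true);
			dataInfo.setInferenceprovenance("propagation"); // what the test checks
			country.setDataInfo(dataInfo);
			return country;
		}
	}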
@@ -7,6 +7,7 @@ import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.antlr.v4.runtime.misc.Utils;
 import org.apache.commons.io.FileUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
@@ -69,26 +70,35 @@ public class CountryPropagationJobTest {
 
 	@Test
 	void testCountryPropagationSoftware() throws Exception {
-		final String sourcePath = getClass()
-			.getResource("/eu/dnetlib/dhp/countrypropagation/graph/software")
-			.getPath();
-		final String preparedInfoPath = getClass()
-			.getResource("/eu/dnetlib/dhp/countrypropagation/preparedInfo/software")
-			.getPath();
-		SparkCountryPropagationJob
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+		sc
+			.textFile(
+				getClass()
+					.getResource("/eu/dnetlib/dhp/countrypropagation/graph/software")
+					.getPath())
+			.saveAsTextFile(workingDir.toString() + "/source/software");
+
+		sc
+			.textFile(
+				getClass()
+					.getResource("/eu/dnetlib/dhp/countrypropagation/preparedInfo/software")
+					.getPath())
+			.saveAsTextFile(workingDir.toString() + "/preparedInfo/software");
+
+		SparkCountryPropagationJob
 			.main(
 				new String[] {
 					"--isSparkSessionManaged", Boolean.FALSE.toString(),
-					"--sourcePath", sourcePath,
+					"--sourcePath", workingDir.toString() + "/source/software",
 					"-resultTableName", Software.class.getCanonicalName(),
-					"-outputPath", workingDir.toString() + "/software",
-					"-preparedInfoPath", preparedInfoPath
+					"-workingPath", workingDir.toString(),
+					"-resultType", "software"
 				});
 
-		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
-
 		JavaRDD<Software> tmp = sc
-			.textFile(workingDir.toString() + "/software")
+			.textFile(workingDir.toString() + "/source/software")
 			.map(item -> OBJECT_MAPPER.readValue(item, Software.class));
 
 		Assertions.assertEquals(10, tmp.count());
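The rewritten test first copies the classpath resources into the working directory, because the single-step job reads and writes everything relative to -workingPath. The repeated copy could be factored into a helper along these lines (a sketch; stageResource is our name, not part of the codebase):

	import org.apache.spark.api.java.JavaSparkContext;

	class ResourceStaging {
		// Read a classpath resource as text and rewrite it under the test
		// working dir, where the job under test expects to find it.
		static void stageResource(JavaSparkContext sc, Class<?> testClass, String resource, String target) {
			sc
				.textFile(testClass.getResource(resource).getPath())
				.saveAsTextFile(target);
		}
	}

	// usage, mirroring the test above:
	// ResourceStaging.stageResource(
	// 	sc, getClass(), "/eu/dnetlib/dhp/countrypropagation/graph/software",
	// 	workingDir.toString() + "/source/software");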
@@ -130,7 +140,7 @@ public class CountryPropagationJobTest {
 
 		Assertions.assertEquals(9, countryExplodedWithCountryclassid.count());
 
-		countryExplodedWithCountryclassid.show(false);
+		//countryExplodedWithCountryclassid.show(false);
 		Assertions
 			.assertEquals(
 				1,
@@ -190,7 +200,7 @@ public class CountryPropagationJobTest {
 			},
 			Encoders.tuple(Encoders.STRING(), Encoders.STRING()));
 
-		countryExplodedWithCountryclassname.show(false);
+		//countryExplodedWithCountryclassname.show(false);
 		Assertions
 			.assertEquals(
 				1,
@@ -259,23 +269,31 @@ public class CountryPropagationJobTest {
 
 	@Test
 	void testCountryPropagationPublication() throws Exception {
-		final String sourcePath = getClass()
-			.getResource("/eu/dnetlib/dhp/countrypropagation/graph/publication")
-			.getPath();
-		final String preparedInfoPath = getClass()
-			.getResource("/eu/dnetlib/dhp/countrypropagation/preparedInfo/publication")
-			.getPath();
-		SparkCountryPropagationJob
-			.main(
-				new String[] {
-					"--isSparkSessionManaged", Boolean.FALSE.toString(),
-					"--sourcePath", sourcePath,
-					"-resultTableName", Publication.class.getCanonicalName(),
-					"-outputPath", workingDir.toString() + "/publication",
-					"-preparedInfoPath", preparedInfoPath
-				});
-
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+		sc
+			.textFile(
+				getClass()
+					.getResource("/eu/dnetlib/dhp/countrypropagation/graph/publication")
+					.getPath())
+			.saveAsTextFile(workingDir.toString() + "/source/publication");
+
+		sc
+			.textFile(
+				getClass()
+					.getResource("/eu/dnetlib/dhp/countrypropagation/preparedInfo/publication")
+					.getPath())
+			.saveAsTextFile(workingDir.toString() + "/preparedInfo/publication");
+
+		SparkCountryPropagationJob
+			.main(
+				new String[] {
+					"--isSparkSessionManaged", Boolean.FALSE.toString(),
+					"--sourcePath", workingDir.toString() + "/source/publication",
+					"-resultTableName", Publication.class.getCanonicalName(),
+					"-workingPath", workingDir.toString(),
+					"-resultType", "publication"
+				});
 
 		JavaRDD<Publication> tmp = sc
 			.textFile(workingDir.toString() + "/publication")
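Both tests now pass -workingPath and -resultType in place of the old -outputPath/-preparedInfoPath pair. A minimal sketch of the path convention this implies, inferred from the staging directories above (the helper is illustrative, not the job's verbatim code):

	// Assumed convention: the single-step job derives its prepared-info
	// location from the working path and the result type, instead of
	// receiving it as a dedicated argument.
	static String preparedInfoPath(String workingPath, String resultType) {
		return workingPath + "/preparedInfo/" + resultType;
	}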
@@ -64,7 +64,7 @@ public class DatasourceCountryPreparationTest {
 				new String[] {
 					"--isSparkSessionManaged", Boolean.FALSE.toString(),
 					"--sourcePath", sourcePath,
-					"--outputPath", workingDir.toString() + "/datasourceCountry",
+					"--workingPath", workingDir.toString() + "/country",
 					"--allowedtypes", "pubsrepository::institutional",
 					"--whitelist",
 					"10|openaire____::3795d6478e30e2c9f787d427ff160944;10|opendoar____::16e6a3326dd7d868cbc926602a61e4d0;10|eurocrisdris::fe4903425d9040f680d8610d9079ea14;10|openaire____::5b76240cc27a58c6f7ceef7d8c36660e;10|openaire____::172bbccecf8fca44ab6a6653e84cb92a;10|openaire____::149c6590f8a06b46314eed77bfca693f;10|eurocrisdris::a6026877c1a174d60f81fd71f62df1c1;10|openaire____::4692342f0992d91f9e705c26959f09e0;10|openaire____::8d529dbb05ec0284662b391789e8ae2a;10|openaire____::345c9d171ef3c5d706d08041d506428c;10|opendoar____::1c1d4df596d01da60385f0bb17a4a9e0;10|opendoar____::7a614fd06c325499f1680b9896beedeb;10|opendoar____::1ee3dfcd8a0645a25a35977997223d22;10|opendoar____::d296c101daa88a51f6ca8cfc1ac79b50;10|opendoar____::798ed7d4ee7138d49b8828958048130a;10|openaire____::c9d2209ecc4d45ba7b4ca7597acb88a2;10|eurocrisdris::c49e0fe4b9ba7b7fab717d1f0f0a674d;10|eurocrisdris::9ae43d14471c4b33661fedda6f06b539;10|eurocrisdris::432ca599953ff50cd4eeffe22faf3e48"
@@ -73,7 +73,7 @@ public class DatasourceCountryPreparationTest {
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
 
 		JavaRDD<DatasourceCountry> tmp = sc
-			.textFile(workingDir.toString() + "/datasourceCountry")
+			.textFile(workingDir.toString() + "/country/datasourceCountry")
 			.map(item -> OBJECT_MAPPER.readValue(item, DatasourceCountry.class));
 
 		Assertions.assertEquals(3, tmp.count());
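The preparation test follows the same scheme: with --workingPath set to <dir>/country, its output is read back from <dir>/country/datasourceCountry. A sketch of the implied helper (hypothetical name, path taken from the test):

	// Assumed convention for the prepare step's output location under --workingPath.
	static String datasourceCountryPath(String workingPath) {
		return workingPath + "/datasourceCountry";
	}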
@@ -60,25 +60,25 @@ public class ResultCountryPreparationTest {
 			.getResource("/eu/dnetlib/dhp/countrypropagation/graph/publication")
 			.getPath();
 
-		final String preparedInfoPath = getClass()
-			.getResource("/eu/dnetlib/dhp/countrypropagation/datasourcecountry")
-			.getPath();
-
-		PrepareResultCountrySet
-			.main(
-				new String[] {
-					"--isSparkSessionManaged", Boolean.FALSE.toString(),
-					"--workingPath", workingDir.toString() + "/working",
-					"--sourcePath", sourcePath,
-					"--outputPath", workingDir.toString() + "/resultCountry",
-					"--preparedInfoPath", preparedInfoPath,
-					"--resultTableName", Publication.class.getCanonicalName()
-				});
-
 		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+
+		sc
+			.textFile(
+				getClass()
+					.getResource("/eu/dnetlib/dhp/countrypropagation/datasourcecountry")
+					.getPath())
+			.saveAsTextFile(workingDir + "/country/datasourceCountry");
+
+		PrepareResultCountrySet
+			.main(
+				new String[] {
+					"--isSparkSessionManaged", Boolean.FALSE.toString(),
+					"--workingPath", workingDir.toString() + "/country",
+					"--sourcePath", sourcePath,
+					"--resultTableName", Publication.class.getCanonicalName()
+				});
 
 		JavaRDD<ResultCountrySet> tmp = sc
-			.textFile(workingDir.toString() + "/resultCountry")
+			.textFile(workingDir.toString() + "/country/preparedInfo/publication")
 			.map(item -> OBJECT_MAPPER.readValue(item, ResultCountrySet.class));
 
 		Assertions.assertEquals(5, tmp.count());
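Taken together, the tests in this diff imply the following layout under the test working directory (reconstructed from the paths above, shown only as a reading aid):

	workingDir/
		source/<resultType>/           staged graph records (input to SparkCountryPropagationJob)
		preparedInfo/<resultType>/     staged result/country associations
		country/
			datasourceCountry/         output of the datasource-country preparation step
			preparedInfo/publication/  output of PrepareResultCountrySet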
File diff suppressed because one or more lines are too long