package eu.dnetlib.dhp.bulktag.eosc;

import static eu.dnetlib.dhp.PropagationConstant.readPath;
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

import java.util.*;
import java.util.function.Function;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.oaf.*;

public class SparkEoscTag {
|
2022-05-02 17:43:20 +02:00
|
|
|
private static final Logger log = LoggerFactory.getLogger(SparkEoscTag.class);
|
|
|
|
public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
|
2022-07-20 18:07:54 +02:00
|
|
|
|
2022-05-02 17:43:20 +02:00
|
|
|
public static void main(String[] args) throws Exception {
|
|
|
|
String jsonConfiguration = IOUtils
|
|
|
|
.toString(
|
|
|
|
SparkEoscTag.class
|
|
|
|
.getResourceAsStream(
|
|
|
|
"/eu/dnetlib/dhp/bulktag/input_eoscTag_parameters.json"));
|
|
|
|
|
|
|
|
final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
|
|
|
|
parser.parseArgument(args);
|
|
|
|
|
|
|
|
Boolean isSparkSessionManaged = Optional
|
|
|
|
.ofNullable(parser.get("isSparkSessionManaged"))
|
|
|
|
.map(Boolean::valueOf)
|
|
|
|
.orElse(Boolean.TRUE);
|
|
|
|
log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
|
|
|
|
|
|
|
|
final String inputPath = parser.get("sourcePath");
|
|
|
|
log.info("inputPath: {}", inputPath);
|
|
|
|
|
|
|
|
final String workingPath = parser.get("workingPath");
|
|
|
|
log.info("workingPath: {}", workingPath);
|
|
|
|
|
|
|
|
SparkConf conf = new SparkConf();
|
|
|
|
|
|
|
|
runWithSparkSession(
|
|
|
|
conf,
|
|
|
|
isSparkSessionManaged,
|
|
|
|
spark -> {
|
|
|
|
execEoscTag(spark, inputPath, workingPath);
|
|
|
|
|
|
|
|
});
|
|
|
|
}
|
|
|
|
|
2022-07-21 14:45:43 +02:00
|
|
|
public static EoscIfGuidelines newInstance(String code, String label, String url, String semantics) {
|
2022-07-20 18:07:54 +02:00
|
|
|
EoscIfGuidelines eig = new EoscIfGuidelines();
|
2022-07-21 14:45:43 +02:00
|
|
|
eig.setCode(code);
|
2022-07-20 18:07:54 +02:00
|
|
|
eig.setLabel(label);
|
|
|
|
eig.setUrl(url);
|
|
|
|
eig.setSemanticRelation(semantics);
|
|
|
|
return eig;
|
|
|
|
|
|
|
|
}
|
2022-07-21 14:45:43 +02:00
|
|
|
|
2022-05-02 17:43:20 +02:00
|
|
|
private static void execEoscTag(SparkSession spark, String inputPath, String workingPath) {
|
|
|
|
|
|
|
|
readPath(spark, inputPath + "/software", Software.class)
|
|
|
|
.map((MapFunction<Software, Software>) s -> {
|
|
|
|
|
|
|
|
if (containsCriteriaNotebook(s)) {
|
2022-07-20 18:07:54 +02:00
|
|
|
if (!Optional.ofNullable(s.getEoscifguidelines()).isPresent())
|
|
|
|
s.setEoscifguidelines(new ArrayList<>());
|
2022-07-21 14:45:43 +02:00
|
|
|
addEIG(
|
|
|
|
s.getEoscifguidelines(), "EOSC::Jupyter Notebook", "EOSC::Jupyter Notebook", "",
|
|
|
|
"compliesWith");
|
2022-07-21 14:36:48 +02:00
|
|
|
|
2022-05-02 17:43:20 +02:00
|
|
|
}
|
|
|
|
if (containsCriteriaGalaxy(s)) {
|
2022-07-20 18:07:54 +02:00
|
|
|
if (!Optional.ofNullable(s.getEoscifguidelines()).isPresent())
|
|
|
|
s.setEoscifguidelines(new ArrayList<>());
|
|
|
|
|
2022-07-21 14:45:43 +02:00
|
|
|
addEIG(
|
|
|
|
s.getEoscifguidelines(), "EOSC::Galaxy Workflow", "EOSC::Galaxy Workflow", "", "compliesWith");
|
2022-05-02 17:43:20 +02:00
|
|
|
}
|
|
|
|
return s;
|
|
|
|
}, Encoders.bean(Software.class))
|
|
|
|
.write()
|
|
|
|
.mode(SaveMode.Overwrite)
|
|
|
|
.option("compression", "gzip")
|
|
|
|
.json(workingPath + "/software");
|
|
|
|
|
|
|
|
readPath(spark, workingPath + "/software", Software.class)
|
|
|
|
.write()
|
|
|
|
.mode(SaveMode.Overwrite)
|
|
|
|
.option("compression", "gzip")
|
|
|
|
.json(inputPath + "/software");
|
|
|
|
|
|
|
|
readPath(spark, inputPath + "/otherresearchproduct", OtherResearchProduct.class)
|
|
|
|
.map((MapFunction<OtherResearchProduct, OtherResearchProduct>) orp -> {
|
2022-07-21 14:36:48 +02:00
|
|
|
|
2022-07-20 18:07:54 +02:00
|
|
|
if (!Optional.ofNullable(orp.getEoscifguidelines()).isPresent())
|
|
|
|
orp.setEoscifguidelines(new ArrayList<>());
|
|
|
|
|
2022-05-02 17:43:20 +02:00
|
|
|
if (containsCriteriaGalaxy(orp)) {
|
2022-07-21 14:45:43 +02:00
|
|
|
addEIG(
|
|
|
|
orp.getEoscifguidelines(), "EOSC::Galaxy Workflow", "EOSC::Galaxy Workflow", "",
|
|
|
|
"compliesWith");
|
2022-05-02 17:43:20 +02:00
|
|
|
}
|
|
|
|
if (containscriteriaTwitter(orp)) {
|
2022-07-21 14:45:43 +02:00
|
|
|
addEIG(orp.getEoscifguidelines(), "EOSC::Twitter Data", "EOSC::Twitter Data", "", "compliesWith");
|
2022-05-02 17:43:20 +02:00
|
|
|
}
|
|
|
|
return orp;
|
|
|
|
}, Encoders.bean(OtherResearchProduct.class))
|
|
|
|
.write()
|
|
|
|
.mode(SaveMode.Overwrite)
|
|
|
|
.option("compression", "gzip")
|
|
|
|
.json(workingPath + "/otherresearchproduct");
|
|
|
|
|
|
|
|
readPath(spark, workingPath + "/otherresearchproduct", OtherResearchProduct.class)
|
|
|
|
.write()
|
|
|
|
.mode(SaveMode.Overwrite)
|
|
|
|
.option("compression", "gzip")
|
|
|
|
.json(inputPath + "/otherresearchproduct");
|
|
|
|
|
|
|
|
readPath(spark, inputPath + "/dataset", Dataset.class)
|
|
|
|
.map((MapFunction<Dataset, Dataset>) d -> {
|
2022-07-21 14:36:48 +02:00
|
|
|
|
2022-07-20 18:07:54 +02:00
|
|
|
if (!Optional.ofNullable(d.getEoscifguidelines()).isPresent())
|
|
|
|
d.setEoscifguidelines(new ArrayList<>());
|
2022-05-02 17:43:20 +02:00
|
|
|
if (containscriteriaTwitter(d)) {
|
2022-07-21 14:45:43 +02:00
|
|
|
addEIG(d.getEoscifguidelines(), "EOSC::Twitter Data", "EOSC::Twitter Data", "", "compliesWith");
|
2022-05-02 17:43:20 +02:00
|
|
|
}
|
|
|
|
return d;
|
|
|
|
}, Encoders.bean(Dataset.class))
|
|
|
|
.write()
|
|
|
|
.mode(SaveMode.Overwrite)
|
|
|
|
.option("compression", "gzip")
|
|
|
|
.json(workingPath + "/dataset");
|
|
|
|
|
|
|
|
readPath(spark, workingPath + "/dataset", Dataset.class)
|
|
|
|
.write()
|
|
|
|
.mode(SaveMode.Overwrite)
|
|
|
|
.option("compression", "gzip")
|
|
|
|
.json(inputPath + "/dataset");
|
|
|
|
}
|
|
|
|
|
2022-07-21 14:45:43 +02:00
|
|
|
private static void addEIG(List<EoscIfGuidelines> eoscifguidelines, String code, String label, String url,
|
|
|
|
String sem) {
|
2022-07-21 14:36:48 +02:00
|
|
|
if (!eoscifguidelines.stream().anyMatch(eig -> eig.getCode().equals(code)))
|
|
|
|
eoscifguidelines.add(newInstance(code, label, url, sem));
|
|
|
|
}
|
|
|
|
|
2022-05-02 17:43:20 +02:00
|
|
|
private static boolean containscriteriaTwitter(Result r) {
|
|
|
|
Set<String> words = getWordsSP(r.getTitle());
|
|
|
|
words.addAll(getWordsF(r.getDescription()));
|
|
|
|
|
|
|
|
if (words.contains("twitter") &&
|
|
|
|
(words.contains("data") || words.contains("dataset")))
|
|
|
|
return true;
|
|
|
|
|
|
|
|
if (r.getSubject().stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("twitter")) &&
|
|
|
|
r.getSubject().stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("data")))
|
|
|
|
return true;
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
private static boolean containsCriteriaGalaxy(Result r) {
|
|
|
|
Set<String> words = getWordsSP(r.getTitle());
|
|
|
|
words.addAll(getWordsF(r.getDescription()));
|
|
|
|
if (words.contains("galaxy") &&
|
2022-05-04 10:06:38 +02:00
|
|
|
words.contains("workflow"))
|
2022-05-02 17:43:20 +02:00
|
|
|
return true;
|
|
|
|
|
|
|
|
if (r.getSubject().stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("galaxy")) &&
|
2022-05-04 10:06:38 +02:00
|
|
|
r.getSubject().stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("workflow")))
|
2022-05-02 17:43:20 +02:00
|
|
|
return true;
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
private static boolean containsCriteriaNotebook(Software s) {
|
|
|
|
if (s.getSubject().stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("jupyter")))
|
|
|
|
return true;
|
|
|
|
if (s
|
|
|
|
.getSubject()
|
|
|
|
.stream()
|
|
|
|
.anyMatch(
|
|
|
|
sbj -> sbj.getValue().toLowerCase().contains("python") &&
|
|
|
|
sbj.getValue().toLowerCase().contains("notebook")))
|
|
|
|
return true;
|
|
|
|
if (s.getSubject().stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("python")) &&
|
|
|
|
s.getSubject().stream().anyMatch(sbj -> sbj.getValue().toLowerCase().contains("notebook")))
|
|
|
|
return true;
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
|
|
|
private static Set<String> getWordsSP(List<StructuredProperty> elem) {
|
|
|
|
Set<String> words = new HashSet<>();
|
2022-05-04 10:06:38 +02:00
|
|
|
Optional
|
|
|
|
.ofNullable(elem)
|
|
|
|
.ifPresent(
|
|
|
|
e -> e
|
|
|
|
.forEach(
|
|
|
|
t -> words
|
|
|
|
.addAll(
|
|
|
|
Arrays.asList(t.getValue().toLowerCase().replaceAll("[^a-zA-Z ]", "").split(" ")))));
|
2022-05-02 17:43:20 +02:00
|
|
|
return words;
|
|
|
|
}
|
|
|
|
|
|
|
|
private static Set<String> getWordsF(List<Field<String>> elem) {
|
|
|
|
Set<String> words = new HashSet<>();
|
2022-05-04 10:06:38 +02:00
|
|
|
Optional
|
|
|
|
.ofNullable(elem)
|
|
|
|
.ifPresent(
|
|
|
|
e -> e
|
|
|
|
.forEach(
|
|
|
|
t -> words
|
|
|
|
.addAll(
|
|
|
|
Arrays.asList(t.getValue().toLowerCase().replaceAll("[^a-zA-Z ]", "").split(" ")))));
|
2022-07-20 18:07:54 +02:00
|
|
|
|
2022-05-02 17:43:20 +02:00
|
|
|
return words;
|
|
|
|
|
|
|
|
}
|
2022-04-21 12:02:09 +02:00
|
|
|
}
|