forked from D-Net/dnet-hadoop
WIP merged beta into main
This commit is contained in:
parent 2ba67f08d3
commit 81bfe3fe32
@@ -27,3 +27,4 @@ spark-warehouse
 /**/.factorypath
 /**/.scalafmt.conf
 /.java-version
+/dhp-shade-package/dependency-reduced-pom.xml
@@ -80,7 +80,15 @@ class WritePredefinedProjectPropertiesTest {
 		mojo.outputFile = testFolder;

 		// execute
-		Assertions.assertThrows(MojoExecutionException.class, () -> mojo.execute());
+		try {
+			mojo.execute();
+			Assertions.assertTrue(false); // not reached
+		} catch (Exception e) {
+			Assertions
+				.assertTrue(
+					MojoExecutionException.class.isAssignableFrom(e.getClass()) ||
+						IllegalArgumentException.class.isAssignableFrom(e.getClass()));
+		}
 	}

 	@Test
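Note: JUnit 5's assertThrows checks a single exception type, while this execution can fail with either MojoExecutionException or IllegalArgumentException depending on the code path, hence the try/catch above. A more compact equivalent sketch (assuming JUnit 5; catch the common supertype, then assert on the concrete type):

    Exception e = Assertions.assertThrows(Exception.class, () -> mojo.execute());
    Assertions.assertTrue(
        e instanceof MojoExecutionException || e instanceof IllegalArgumentException);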
@@ -38,7 +38,7 @@ public class PacePerson {
 				PacePerson.class
 					.getResourceAsStream(
 						"/eu/dnetlib/dhp/common/name_particles.txt")));
-		} catch (IOException e) {
+		} catch (Exception e) {
 			throw new RuntimeException(e);
 		}
 	}
@@ -12,9 +12,7 @@ import java.util.concurrent.TimeUnit;

 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.math.NumberUtils;
-import org.apache.commons.lang3.time.DateUtils;
 import org.apache.http.HttpHeaders;
-import org.joda.time.Instant;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -468,6 +468,10 @@ public class MergeUtils {
 		merge.setIsInDiamondJournal(booleanOR(merge.getIsInDiamondJournal(), enrich.getIsInDiamondJournal()));
 		merge.setPubliclyFunded(booleanOR(merge.getPubliclyFunded(), enrich.getPubliclyFunded()));
+
+		if (StringUtils.isBlank(merge.getTransformativeAgreement())) {
+			merge.setTransformativeAgreement(enrich.getTransformativeAgreement());
+		}

 		return merge;
 	}
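Note: the merge above relies on a null-safe OR over Boolean flags. A minimal sketch of what booleanOR is assumed to do (the actual helper is defined elsewhere in MergeUtils and is not shown in this diff):

    // Hypothetical sketch: keep the non-null operand when one side is
    // missing, otherwise OR the two flags.
    static Boolean booleanOR(Boolean a, Boolean b) {
        if (a == null) return b;
        if (b == null) return a;
        return a || b;
    }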
@@ -154,5 +154,13 @@
 	"unknown":{
 		"original":"Unknown",
 		"inverse":"Unknown"
+	},
+	"isamongtopnsimilardocuments": {
+		"original": "IsAmongTopNSimilarDocuments",
+		"inverse": "HasAmongTopNSimilarDocuments"
+	},
+	"hasamongtopnsimilardocuments": {
+		"original": "HasAmongTopNSimilarDocuments",
+		"inverse": "IsAmongTopNSimilarDocuments"
 	}
 }
@@ -65,12 +65,13 @@ abstract class AbstractScalaApplication(
 		val conf: SparkConf = new SparkConf()
 		val master = parser.get("master")
 		log.info(s"Creating Spark session: Master: $master")
-		SparkSession
+		val b = SparkSession
 			.builder()
 			.config(conf)
 			.appName(getClass.getSimpleName)
-			.master(master)
-			.getOrCreate()
+		if (master != null)
+			b.master(master)
+		b.getOrCreate()
 	}

 	def reportTotalSize(targetPath: String, outputBasePath: String): Unit = {
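Note: the builder now calls .master() only when the parser actually provides one, so a master already configured by spark-submit is no longer overridden. The same pattern expressed with the standard Java SparkSession API (names are illustrative):

    SparkSession.Builder b = SparkSession.builder()
        .config(conf)
        .appName(getClass().getSimpleName());
    if (master != null) {
        b = b.master(master); // only force the master when explicitly given
    }
    SparkSession spark = b.getOrCreate();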
@@ -65,7 +65,11 @@ object ScholixUtils extends Serializable {
 	}

 	def generateScholixResourceFromResult(r: Result): ScholixResource = {
+		val sum = ScholixUtils.resultToSummary(r)
+		if (sum != null)
 			generateScholixResourceFromSummary(ScholixUtils.resultToSummary(r))
+		else
+			null
 	}

 	val statsAggregator: Aggregator[(String, String, Long), RelatedEntities, RelatedEntities] =
@@ -153,6 +157,14 @@ object ScholixUtils extends Serializable {

 	}

+	def invRel(rel: String): String = {
+		val semanticRelation = relations.getOrElse(rel.toLowerCase, null)
+		if (semanticRelation != null)
+			semanticRelation.inverse
+		else
+			null
+	}
+
 	def extractCollectedFrom(summary: ScholixResource): List[ScholixEntityId] = {
 		if (summary.getCollectedFrom != null && !summary.getCollectedFrom.isEmpty) {
 			val l: List[ScholixEntityId] = summary.getCollectedFrom.asScala.map { d =>
@@ -377,10 +389,7 @@ object ScholixUtils extends Serializable {
 		if (persistentIdentifiers.isEmpty)
 			return null
 		s.setLocalIdentifier(persistentIdentifiers.asJava)
-		if (r.isInstanceOf[Publication])
-			s.setTypology(Typology.publication)
-		else
-			s.setTypology(Typology.dataset)
+		// s.setTypology(r.getResulttype.getClassid)

 		s.setSubType(r.getInstance().get(0).getInstancetype.getClassname)
@@ -177,7 +177,7 @@ class OafMapperUtilsTest {
 		assertTrue(cfId(d1.getCollectedfrom()).contains(ModelConstants.CROSSREF_ID));

 		assertEquals(
-			ModelConstants.DATASET_RESULTTYPE_CLASSID,
+			ModelConstants.PUBLICATION_RESULTTYPE_CLASSID,
 			((Result) MergeUtils
 				.merge(p2, d1))
 					.getResulttype()
@@ -24,7 +24,7 @@
 			<executions>
 				<execution>
 					<id>scala-compile-first</id>
-					<phase>initialize</phase>
+					<phase>process-resources</phase>
 					<goals>
 						<goal>add-source</goal>
 						<goal>compile</goal>
@@ -59,14 +59,6 @@
 			<groupId>edu.cmu</groupId>
 			<artifactId>secondstring</artifactId>
 		</dependency>
-		<dependency>
-			<groupId>com.google.guava</groupId>
-			<artifactId>guava</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>com.google.code.gson</groupId>
-			<artifactId>gson</artifactId>
-		</dependency>
 		<dependency>
 			<groupId>org.apache.commons</groupId>
 			<artifactId>commons-lang3</artifactId>
@@ -91,10 +83,6 @@
 			<groupId>com.fasterxml.jackson.core</groupId>
 			<artifactId>jackson-databind</artifactId>
 		</dependency>
-		<dependency>
-			<groupId>org.apache.commons</groupId>
-			<artifactId>commons-math3</artifactId>
-		</dependency>
 		<dependency>
 			<groupId>com.jayway.jsonpath</groupId>
 			<artifactId>json-path</artifactId>
@@ -113,4 +101,90 @@
 		</dependency>
 	</dependencies>

+	<profiles>
+		<profile>
+			<id>spark-24</id>
+			<activation>
+				<activeByDefault>true</activeByDefault>
+			</activation>
+
+			<build>
+				<plugins>
+					<plugin>
+						<groupId>org.codehaus.mojo</groupId>
+						<artifactId>build-helper-maven-plugin</artifactId>
+						<version>3.4.0</version>
+						<executions>
+							<execution>
+								<phase>generate-sources</phase>
+								<goals>
+									<goal>add-source</goal>
+								</goals>
+								<configuration>
+									<sources>
+										<source>src/main/spark-2</source>
+									</sources>
+								</configuration>
+							</execution>
+						</executions>
+					</plugin>
+				</plugins>
+			</build>
+		</profile>
+
+		<profile>
+			<id>spark-34</id>
+
+			<build>
+				<plugins>
+					<plugin>
+						<groupId>org.codehaus.mojo</groupId>
+						<artifactId>build-helper-maven-plugin</artifactId>
+						<version>3.4.0</version>
+						<executions>
+							<execution>
+								<phase>generate-sources</phase>
+								<goals>
+									<goal>add-source</goal>
+								</goals>
+								<configuration>
+									<sources>
+										<source>src/main/spark-2</source>
+									</sources>
+								</configuration>
+							</execution>
+						</executions>
+					</plugin>
+				</plugins>
+			</build>
+		</profile>
+
+		<profile>
+			<id>spark-35</id>
+
+			<build>
+				<plugins>
+					<plugin>
+						<groupId>org.codehaus.mojo</groupId>
+						<artifactId>build-helper-maven-plugin</artifactId>
+						<version>3.4.0</version>
+						<executions>
+							<execution>
+								<phase>generate-sources</phase>
+								<goals>
+									<goal>add-source</goal>
+								</goals>
+								<configuration>
+									<sources>
+										<source>src/main/spark-35</source>
+									</sources>
+								</configuration>
+							</execution>
+						</executions>
+					</plugin>
+				</plugins>
+			</build>
+		</profile>
+	</profiles>

 </project>
@@ -2,31 +2,41 @@
 package eu.dnetlib.pace.clustering;

 import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 import java.util.stream.Collectors;

 import org.apache.commons.lang3.StringUtils;

 import eu.dnetlib.pace.config.Config;

-@ClusteringClass("keywordsclustering")
-public class KeywordsClustering extends AbstractClusteringFunction {
+@ClusteringClass("legalnameclustering")
+public class LegalnameClustering extends AbstractClusteringFunction {

-	public KeywordsClustering(Map<String, Object> params) {
+	private static final Pattern CITY_CODE_PATTERN = Pattern.compile("city::\\d+");
+	private static final Pattern KEYWORD_CODE_PATTERN = Pattern.compile("key::\\d+");
+
+	public LegalnameClustering(Map<String, Object> params) {
 		super(params);
 	}

+	public Set<String> getRegexList(String input, Pattern codeRegex) {
+		Matcher matcher = codeRegex.matcher(input);
+		Set<String> cities = new HashSet<>();
+		while (matcher.find()) {
+			cities.add(matcher.group());
+		}
+		return cities;
+	}
+
 	@Override
 	protected Collection<String> doApply(final Config conf, String s) {

-		// takes city codes and keywords codes without duplicates
-		Set<String> keywords = getKeywords(s, conf.translationMap(), paramOrDefault("windowSize", 4));
-		Set<String> cities = getCities(s, paramOrDefault("windowSize", 4));
-
 		// list of combination to return as result
 		final Collection<String> combinations = new LinkedHashSet<String>();

-		for (String keyword : keywordsToCodes(keywords, conf.translationMap())) {
-			for (String city : citiesToCodes(cities)) {
+		for (String keyword : getRegexList(s, KEYWORD_CODE_PATTERN)) {
+			for (String city : getRegexList(s, CITY_CODE_PATTERN)) {
 				combinations.add(keyword + "-" + city);
 				if (combinations.size() >= paramOrDefault("max", 2)) {
 					return combinations;

@@ -42,9 +52,6 @@ public class KeywordsClustering extends AbstractClusteringFunction {
 		return fields
 			.stream()
 			.filter(f -> !f.isEmpty())
-			.map(KeywordsClustering::cleanup)
-			.map(KeywordsClustering::normalize)
-			.map(s -> filterAllStopWords(s))
 			.map(s -> doApply(conf, s))
 			.map(c -> filterBlacklisted(c, ngramBlacklist))
 			.flatMap(c -> c.stream())
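Note: the renamed clustering function no longer extracts keywords and cities from the raw legalname; it expects key::/city:: codes to already be present in the field value and simply pairs them. A usage sketch mirroring the updated ClusteringFunctionTest further down (params and conf as in that test):

    ClusteringFunction cf = new LegalnameClustering(params);
    cf.apply(conf, Lists.newArrayList("key::1 key::2 city::1"));
    // each key:: code is combined with each city:: code, capped by the
    // "max" parameter, e.g. ["key::1-city::1", "key::2-city::1"]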
@@ -27,6 +27,14 @@ public class AbstractPaceFunctions extends PaceCommonUtils {
 	private static Map<String, String> cityMap = AbstractPaceFunctions
 		.loadMapFromClasspath("/eu/dnetlib/pace/config/city_map.csv");

+	// keywords map to be used when translating the keyword names into codes
+	private static Map<String, String> keywordMap = AbstractPaceFunctions
+		.loadMapFromClasspath("/eu/dnetlib/pace/config/translation_map.csv");
+
+	// country map to be used when inferring the country from the city name
+	private static Map<String, String> countryMap = AbstractPaceFunctions
+		.loadCountryMapFromClasspath("/eu/dnetlib/pace/config/country_map.csv");
+
 	// list of stopwords in different languages
 	protected static Set<String> stopwords_gr = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_gr.txt");
 	protected static Set<String> stopwords_en = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_en.txt");
@@ -74,6 +82,64 @@ public class AbstractPaceFunctions extends PaceCommonUtils {
 		return s12;
 	}

+	public static String countryInference(final String original, String inferFrom) {
+		if (!original.equalsIgnoreCase("unknown"))
+			return original;
+
+		inferFrom = cleanup(inferFrom);
+		inferFrom = normalize(inferFrom);
+		inferFrom = filterAllStopWords(inferFrom);
+		Set<String> cities = getCities(inferFrom, 4);
+		return citiesToCountry(cities).stream().findFirst().orElse("UNKNOWN");
+	}
+
+	public static String cityInference(String original) {
+		original = cleanup(original);
+		original = normalize(original);
+		original = filterAllStopWords(original);
+
+		Set<String> cities = getCities(original, 4);
+
+		for (String city : cities) {
+			original = original.replaceAll(city, cityMap.get(city));
+		}
+
+		return original;
+	}
+
+	public static String keywordInference(String original) {
+		original = cleanup(original);
+		original = normalize(original);
+		original = filterAllStopWords(original);
+
+		Set<String> keywords = getKeywords(original, keywordMap, 4);
+
+		for (String keyword : keywords) {
+			original = original.replaceAll(keyword, keywordMap.get(keyword));
+		}
+
+		return original;
+	}
+
+	public static String cityKeywordInference(String original) {
+		original = cleanup(original);
+		original = normalize(original);
+		original = filterAllStopWords(original);
+
+		Set<String> keywords = getKeywords(original, keywordMap, 4);
+		Set<String> cities = getCities(original, 4);
+
+		for (String keyword : keywords) {
+			original = original.replaceAll(keyword, keywordMap.get(keyword));
+		}
+
+		for (String city : cities) {
+			original = original.replaceAll(city, cityMap.get(city));
+		}
+
+		return original;
+	}
+
 	protected static String fixXML(final String a) {

 		return a
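Note: countryInference only fires when the declared country is "unknown"; an explicit value always wins. A usage sketch grounded in the new PaceFunctionTest cases below:

    countryInference("UNKNOWN", "Università di Bologna"); // -> "IT" (city found, mapped via cityMap + countryMap)
    countryInference("UK", "Università di Bologna");      // -> "UK" (explicit country kept)
    cityInference("Università di Bologna");               // -> "universita city::3181928"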
@@ -208,6 +274,30 @@ public class AbstractPaceFunctions extends PaceCommonUtils {
 		return m;
 	}

+	public static Map<String, String> loadCountryMapFromClasspath(final String classpath) {
+
+		Transliterator transliterator = Transliterator.getInstance("Any-Eng");
+
+		final Map<String, String> m = new HashMap<>();
+		try {
+			for (final String s : IOUtils
+				.readLines(AbstractPaceFunctions.class.getResourceAsStream(classpath), StandardCharsets.UTF_8)) {
+				// string is like this: country_code;city1;city2;city3
+				String[] line = s.split(";");
+				String value = line[0];
+				for (int i = 1; i < line.length; i++) {
+					String city = fixAliases(transliterator.transliterate(line[i].toLowerCase()));
+					String code = cityMap.get(city);
+					m.put(code, value);
+				}
+			}
+		} catch (final Throwable e) {
+			return new HashMap<>();
+		}
+		return m;
+
+	}
+
 	public static String removeKeywords(String s, Set<String> keywords) {

 		s = " " + s + " ";
@@ -237,6 +327,10 @@ public class AbstractPaceFunctions extends PaceCommonUtils {
 		return toCodes(keywords, cityMap);
 	}

+	public static Set<String> citiesToCountry(Set<String> cities) {
+		return toCodes(toCodes(cities, cityMap), countryMap);
+	}
+
 	protected static String firstLC(final String s) {
 		return StringUtils.substring(s, 0, 1).toLowerCase();
 	}
@@ -47,9 +47,21 @@ public class FieldDef implements Serializable {

 	private String clean;

+	private String infer;
+
+	private String inferenceFrom;
+
 	public FieldDef() {
 	}

+	public String getInferenceFrom() {
+		return inferenceFrom;
+	}
+
+	public void setInferenceFrom(final String inferenceFrom) {
+		this.inferenceFrom = inferenceFrom;
+	}
+
 	public String getName() {
 		return name;
 	}

@@ -126,6 +138,14 @@ public class FieldDef implements Serializable {
 		this.clean = clean;
 	}

+	public String getInfer() {
+		return infer;
+	}
+
+	public void setInfer(String infer) {
+		this.infer = infer;
+	}
+
 	@Override
 	public String toString() {
 		try {
@@ -3,7 +3,7 @@ package eu.dnetlib.pace.model
 import com.jayway.jsonpath.{Configuration, JsonPath}
 import eu.dnetlib.pace.common.AbstractPaceFunctions
 import eu.dnetlib.pace.config.{DedupConfig, Type}
-import eu.dnetlib.pace.util.MapDocumentUtil
+import eu.dnetlib.pace.util.{MapDocumentUtil, SparkCompatUtils}
 import org.apache.commons.lang3.StringUtils
 import org.apache.spark.sql.catalyst.encoders.RowEncoder
 import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema

@@ -52,7 +52,7 @@ case class SparkModel(conf: DedupConfig) {
 	val orderingFieldPosition: Int = schema.fieldIndex(orderingFieldName)

 	val parseJsonDataset: (Dataset[String] => Dataset[Row]) = df => {
-		df.map(r => rowFromJson(r))(RowEncoder(schema))
+		df.map(r => rowFromJson(r))(SparkCompatUtils.encoderFor(schema))
 	}

 	def rowFromJson(json: String): Row = {

@@ -123,9 +123,19 @@ case class SparkModel(conf: DedupConfig) {
 					case _ => res(index)
 				}
 			}

+			if (StringUtils.isNotBlank(fdef.getInfer)) {
+				val inferFrom: String = if (StringUtils.isNotBlank(fdef.getInferenceFrom)) fdef.getInferenceFrom else fdef.getPath
+				res(index) = res(index) match {
+					case x: Seq[String] => x.map(inference(_, MapDocumentUtil.getJPathString(inferFrom, documentContext), fdef.getInfer))
+					case _ => inference(res(index).toString, MapDocumentUtil.getJPathString(inferFrom, documentContext), fdef.getInfer)
+				}
+			}
 		}

 		res
 	}

 	new GenericRowWithSchema(values, schema)

@@ -146,5 +156,17 @@ case class SparkModel(conf: DedupConfig) {
 		res
 	}

+	def inference(value: String, inferfrom: String, infertype: String): String = {
+		val res = infertype match {
+			case "country" => AbstractPaceFunctions.countryInference(value, inferfrom)
+			case "city" => AbstractPaceFunctions.cityInference(value)
+			case "keyword" => AbstractPaceFunctions.keywordInference(value)
+			case "city_keyword" => AbstractPaceFunctions.cityKeywordInference(value)
+			case _ => value
+		}
+
+		res
+	}
+
 }
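Note: the FieldDef.infer / inferenceFrom pair added earlier drives this step: when infer is set, the field value is rewritten by one of the new AbstractPaceFunctions helpers before matching, reading the auxiliary value from inferenceFrom (or the field's own path). The same dispatch in Java form (a sketch; the authoritative version is the Scala inference above):

    static String inference(String value, String inferFrom, String inferType) {
        switch (inferType) {
            case "country":      return AbstractPaceFunctions.countryInference(value, inferFrom);
            case "city":         return AbstractPaceFunctions.cityInference(value);
            case "keyword":      return AbstractPaceFunctions.keywordInference(value);
            case "city_keyword": return AbstractPaceFunctions.cityKeywordInference(value);
            default:             return value; // unknown infer types leave the value untouched
        }
    }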
@@ -1,48 +0,0 @@
-
-package eu.dnetlib.pace.tree;
-
-import java.util.Map;
-import java.util.Set;
-
-import eu.dnetlib.pace.config.Config;
-import eu.dnetlib.pace.tree.support.AbstractStringComparator;
-import eu.dnetlib.pace.tree.support.ComparatorClass;
-
-@ComparatorClass("cityMatch")
-public class CityMatch extends AbstractStringComparator {
-
-	private Map<String, String> params;
-
-	public CityMatch(Map<String, String> params) {
-		super(params);
-		this.params = params;
-	}
-
-	@Override
-	public double distance(final String a, final String b, final Config conf) {
-
-		String ca = cleanup(a);
-		String cb = cleanup(b);
-
-		ca = normalize(ca);
-		cb = normalize(cb);
-
-		ca = filterAllStopWords(ca);
-		cb = filterAllStopWords(cb);
-
-		Set<String> cities1 = getCities(ca, Integer.parseInt(params.getOrDefault("windowSize", "4")));
-		Set<String> cities2 = getCities(cb, Integer.parseInt(params.getOrDefault("windowSize", "4")));
-
-		Set<String> codes1 = citiesToCodes(cities1);
-		Set<String> codes2 = citiesToCodes(cities2);
-
-		// if no cities are detected, the comparator gives 1.0
-		if (codes1.isEmpty() && codes2.isEmpty())
-			return 1.0;
-		else {
-			if (codes1.isEmpty() ^ codes2.isEmpty())
-				return -1; // undefined if one of the two has no cities
-			return commonElementsPercentage(codes1, codes2);
-		}
-	}
-}
@@ -0,0 +1,51 @@
+
+package eu.dnetlib.pace.tree;
+
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import eu.dnetlib.pace.config.Config;
+import eu.dnetlib.pace.tree.support.AbstractStringComparator;
+import eu.dnetlib.pace.tree.support.ComparatorClass;
+
+@ComparatorClass("codeMatch")
+public class CodeMatch extends AbstractStringComparator {
+
+	private Map<String, String> params;
+
+	private Pattern CODE_REGEX;
+
+	public CodeMatch(Map<String, String> params) {
+		super(params);
+		this.params = params;
+		this.CODE_REGEX = Pattern.compile(params.getOrDefault("codeRegex", "[a-zA-Z]::\\d+"));
+	}
+
+	public Set<String> getRegexList(String input) {
+		Matcher matcher = this.CODE_REGEX.matcher(input);
+		Set<String> cities = new HashSet<>();
+		while (matcher.find()) {
+			cities.add(matcher.group());
+		}
+		return cities;
+	}
+
+	@Override
+	public double distance(final String a, final String b, final Config conf) {
+
+		Set<String> codes1 = getRegexList(a);
+		Set<String> codes2 = getRegexList(b);
+
+		// if no codes are detected, the comparator gives 1.0
+		if (codes1.isEmpty() && codes2.isEmpty())
+			return 1.0;
+		else {
+			if (codes1.isEmpty() ^ codes2.isEmpty())
+				return -1; // undefined if one of the two has no codes
+			return commonElementsPercentage(codes1, codes2);
+		}
+	}
+}
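Note: CodeMatch generalizes the removed CityMatch/KeywordMatch comparators: it compares only the codes matched by codeRegex and ignores the surrounding free text. A usage sketch mirroring the new ComparatorTest cases (conf as in those tests):

    Map<String, String> params = new HashMap<>();
    params.put("codeRegex", "key::\\d+");
    CodeMatch codeMatch = new CodeMatch(params);
    codeMatch.distance("testing1 key::1", "testing2 key::1", conf); // 1.0  (same code)
    codeMatch.distance("testing1 key::1", "testing2 key::2", conf); // 0.0  (different codes)
    codeMatch.distance("testing1 key::1", "testing2", conf);        // -1.0 (undefined: one side has no code)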
@@ -2,6 +2,7 @@
 package eu.dnetlib.pace.tree;

 import java.util.Map;
+import java.util.Set;

 import com.wcohen.ss.AbstractStringDistance;

@@ -12,8 +13,11 @@ import eu.dnetlib.pace.tree.support.ComparatorClass;
 @ComparatorClass("countryMatch")
 public class CountryMatch extends AbstractStringComparator {

+	private Map<String, String> params;
+
 	public CountryMatch(Map<String, String> params) {
 		super(params, new com.wcohen.ss.JaroWinkler());
+		this.params = params;
 	}

 	public CountryMatch(final double weight) {

@@ -26,6 +30,7 @@ public class CountryMatch extends AbstractStringComparator {

 	@Override
 	public double distance(final String a, final String b, final Config conf) {

 		if (a.isEmpty() || b.isEmpty()) {
 			return -1.0; // return -1 if a field is missing
 		}

@@ -45,4 +50,5 @@ public class CountryMatch extends AbstractStringComparator {
 	protected double normalize(final double d) {
 		return d;
 	}

 }
@@ -0,0 +1,59 @@
+
+package eu.dnetlib.pace.tree;
+
+import java.util.Map;
+import java.util.Set;
+
+import com.wcohen.ss.AbstractStringDistance;
+
+import eu.dnetlib.pace.config.Config;
+import eu.dnetlib.pace.tree.support.AbstractStringComparator;
+import eu.dnetlib.pace.tree.support.ComparatorClass;
+
+@ComparatorClass("jaroWinklerLegalname")
+public class JaroWinklerLegalname extends AbstractStringComparator {
+
+	private Map<String, String> params;
+
+	private final String CITY_CODE_REGEX = "city::\\d+";
+	private final String KEYWORD_CODE_REGEX = "key::\\d+";
+
+	public JaroWinklerLegalname(Map<String, String> params) {
+		super(params, new com.wcohen.ss.JaroWinkler());
+		this.params = params;
+	}
+
+	public JaroWinklerLegalname(double weight) {
+		super(weight, new com.wcohen.ss.JaroWinkler());
+	}
+
+	protected JaroWinklerLegalname(double weight, AbstractStringDistance ssalgo) {
+		super(weight, ssalgo);
+	}
+
+	@Override
+	public double distance(String a, String b, final Config conf) {
+
+		String ca = a.replaceAll(CITY_CODE_REGEX, "").replaceAll(KEYWORD_CODE_REGEX, " ");
+		String cb = b.replaceAll(CITY_CODE_REGEX, "").replaceAll(KEYWORD_CODE_REGEX, " ");
+
+		ca = ca.replaceAll("[ ]{2,}", " ");
+		cb = cb.replaceAll("[ ]{2,}", " ");
+
+		if (ca.isEmpty() && cb.isEmpty())
+			return 1.0;
+		else
+			return normalize(ssalgo.score(ca, cb));
+	}
+
+	@Override
+	public double getWeight() {
+		return super.weight;
+	}
+
+	@Override
+	protected double normalize(double d) {
+		return d;
+	}
+
+}
@@ -1,74 +0,0 @@
-
-package eu.dnetlib.pace.tree;
-
-import java.util.Map;
-import java.util.Set;
-
-import com.wcohen.ss.AbstractStringDistance;
-
-import eu.dnetlib.pace.config.Config;
-import eu.dnetlib.pace.tree.support.AbstractStringComparator;
-import eu.dnetlib.pace.tree.support.ComparatorClass;
-
-@ComparatorClass("jaroWinklerNormalizedName")
-public class JaroWinklerNormalizedName extends AbstractStringComparator {
-
-	private Map<String, String> params;
-
-	public JaroWinklerNormalizedName(Map<String, String> params) {
-		super(params, new com.wcohen.ss.JaroWinkler());
-		this.params = params;
-	}
-
-	public JaroWinklerNormalizedName(double weight) {
-		super(weight, new com.wcohen.ss.JaroWinkler());
-	}
-
-	protected JaroWinklerNormalizedName(double weight, AbstractStringDistance ssalgo) {
-		super(weight, ssalgo);
-	}
-
-	@Override
-	public double distance(String a, String b, final Config conf) {
-		String ca = cleanup(a);
-		String cb = cleanup(b);
-
-		ca = normalize(ca);
-		cb = normalize(cb);
-
-		ca = filterAllStopWords(ca);
-		cb = filterAllStopWords(cb);
-
-		Set<String> keywords1 = getKeywords(
-			ca, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
-		Set<String> keywords2 = getKeywords(
-			cb, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
-
-		Set<String> cities1 = getCities(ca, Integer.parseInt(params.getOrDefault("windowSize", "4")));
-		Set<String> cities2 = getCities(cb, Integer.parseInt(params.getOrDefault("windowSize", "4")));
-
-		ca = removeKeywords(ca, keywords1);
-		ca = removeKeywords(ca, cities1);
-		cb = removeKeywords(cb, keywords2);
-		cb = removeKeywords(cb, cities2);
-
-		ca = ca.replaceAll("[ ]{2,}", " ");
-		cb = cb.replaceAll("[ ]{2,}", " ");
-
-		if (ca.isEmpty() && cb.isEmpty())
-			return 1.0;
-		else
-			return normalize(ssalgo.score(ca, cb));
-	}
-
-	@Override
-	public double getWeight() {
-		return super.weight;
-	}
-
-	@Override
-	protected double normalize(double d) {
-		return d;
-	}
-
-}
@@ -1,50 +0,0 @@
-
-package eu.dnetlib.pace.tree;
-
-import java.util.Map;
-import java.util.Set;
-
-import eu.dnetlib.pace.config.Config;
-import eu.dnetlib.pace.tree.support.AbstractStringComparator;
-import eu.dnetlib.pace.tree.support.ComparatorClass;
-
-@ComparatorClass("keywordMatch")
-public class KeywordMatch extends AbstractStringComparator {
-
-	Map<String, String> params;
-
-	public KeywordMatch(Map<String, String> params) {
-		super(params);
-		this.params = params;
-	}
-
-	@Override
-	public double distance(final String a, final String b, final Config conf) {
-
-		String ca = cleanup(a);
-		String cb = cleanup(b);
-
-		ca = normalize(ca);
-		cb = normalize(cb);
-
-		ca = filterAllStopWords(ca);
-		cb = filterAllStopWords(cb);
-
-		Set<String> keywords1 = getKeywords(
-			ca, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
-		Set<String> keywords2 = getKeywords(
-			cb, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
-
-		Set<String> codes1 = toCodes(keywords1, conf.translationMap());
-		Set<String> codes2 = toCodes(keywords2, conf.translationMap());
-
-		// if no cities are detected, the comparator gives 1.0
-		if (codes1.isEmpty() && codes2.isEmpty())
-			return 1.0;
-		else {
-			if (codes1.isEmpty() ^ codes2.isEmpty())
-				return -1.0; // undefined if one of the two has no keywords
-			return commonElementsPercentage(codes1, codes2);
-		}
-	}
-}
@@ -48,7 +48,7 @@ public class TreeNodeDef implements Serializable {
 	// function for the evaluation of the node
 	public TreeNodeStats evaluate(Row doc1, Row doc2, Config conf) {

-		TreeNodeStats stats = new TreeNodeStats();
+		TreeNodeStats stats = new TreeNodeStats(ignoreUndefined);

 		// for each field in the node, it computes the
 		for (FieldConf fieldConf : fields) {
@@ -9,8 +9,11 @@ public class TreeNodeStats implements Serializable {

 	private Map<String, FieldStats> results; // this is an accumulator for the results of the node

-	public TreeNodeStats() {
+	private final boolean ignoreUndefined;
+
+	public TreeNodeStats(boolean ignoreUndefined) {
 		this.results = new HashMap<>();
+		this.ignoreUndefined = ignoreUndefined;
 	}

 	public Map<String, FieldStats> getResults() {

@@ -22,7 +25,10 @@ public class TreeNodeStats implements Serializable {
 	}

 	public int fieldsCount() {
+		if (ignoreUndefined)
 			return this.results.size();
+		else
+			return this.results.size() - undefinedCount(); // do not count undefined
 	}

 	public int undefinedCount() {

@@ -78,12 +84,23 @@ public class TreeNodeStats implements Serializable {
 		double min = 100.0; // random high value
 		for (FieldStats fs : this.results.values()) {
 			if (fs.getResult() < min) {
-				if (fs.getResult() >= 0.0 || (fs.getResult() == -1 && fs.isCountIfUndefined()))
+				if (fs.getResult() == -1) {
+					if (fs.isCountIfUndefined()) {
+						min = 0.0;
+					} else {
+						min = -1;
+					}
+				} else {
 					min = fs.getResult();
 				}
 			}
+		}
+		if (ignoreUndefined) {
+			return min == -1.0 ? 0.0 : min;
+		} else {
 			return min;
 		}
+	}

 	// if at least one is true, return 1.0
 	public double or() {

@@ -91,8 +108,12 @@ public class TreeNodeStats implements Serializable {
 			if (fieldStats.getResult() >= fieldStats.getThreshold())
 				return 1.0;
 		}
+		if (!ignoreUndefined && undefinedCount() > 0) {
+			return -1.0;
+		} else {
 			return 0.0;
 		}
+	}

 	// if at least one is false, return 0.0
 	public double and() {

@@ -100,7 +121,7 @@ public class TreeNodeStats implements Serializable {

 			if (fieldStats.getResult() == -1) {
 				if (fieldStats.isCountIfUndefined())
-					return 0.0;
+					return ignoreUndefined ? 0.0 : -1.0;
 			} else {
 				if (fieldStats.getResult() < fieldStats.getThreshold())
 					return 0.0;
@@ -44,12 +44,10 @@ public class TreeProcessor {
 			TreeNodeStats stats = currentNode.evaluate(doc1, doc2, config);
 			treeStats.addNodeStats(nextNodeName, stats);

-			// if ignoreUndefined=false the miss is considered as undefined
-			if (!currentNode.isIgnoreUndefined() && stats.undefinedCount() > 0) {
+			double finalScore = stats.getFinalScore(currentNode.getAggregation());
+			if (finalScore == -1.0)
 				nextNodeName = currentNode.getUndefined();
-			}
-			// if ignoreUndefined=true the miss is ignored and the score computed anyway
-			else if (stats.getFinalScore(currentNode.getAggregation()) >= currentNode.getThreshold()) {
+			else if (finalScore >= currentNode.getThreshold()) {
 				nextNodeName = currentNode.getPositive();
 			} else {
 				nextNodeName = currentNode.getNegative();
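Note: ignoreUndefined now travels from the tree node into TreeNodeStats, so an undefined comparison surfaces as a -1.0 final score and the branching happens in one place. A hedged sketch of the resulting semantics (constructor and method names from the diff; the aggregation argument type is assumed):

    TreeNodeStats strict = new TreeNodeStats(false);  // undefined results propagate as -1.0
    TreeNodeStats lenient = new TreeNodeStats(true);  // undefined results count as 0.0
    // TreeProcessor then maps a -1.0 final score to the node's "undefined"
    // branch, a score >= threshold to "positive", and anything else to "negative".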
File diff suppressed because one or more lines are too long
@@ -0,0 +1,12 @@
+package eu.dnetlib.pace.util
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.catalyst.encoders.{ExpressionEncoder, RowEncoder}
+import org.apache.spark.sql.types.StructType
+
+object SparkCompatUtils {
+
+	def encoderFor(schema: StructType): ExpressionEncoder[Row] = {
+		RowEncoder(schema)
+	}
+}
@@ -0,0 +1,12 @@
+package eu.dnetlib.pace.util
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
+import org.apache.spark.sql.types.StructType
+
+object SparkCompatUtils {
+
+	def encoderFor(schema: StructType): ExpressionEncoder[Row] = {
+		ExpressionEncoder(schema)
+	}
+}
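Note: the two SparkCompatUtils variants above share one signature but live in profile-specific source trees (src/main/spark-2 vs src/main/spark-35, selected by the build-helper profiles added earlier), because Spark 3.5 dropped RowEncoder(schema) in favor of ExpressionEncoder(schema). Calling code compiles unchanged against either; a hypothetical Java-side view:

    // Same call on every Spark version; the active Maven profile decides
    // which implementation is on the compile path.
    ExpressionEncoder<Row> encoder = SparkCompatUtils.encoderFor(schema);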
@@ -8,6 +8,7 @@ import org.junit.jupiter.api.Test;

 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import com.mongodb.connection.Cluster;

 import eu.dnetlib.pace.AbstractPaceTest;
 import eu.dnetlib.pace.common.AbstractPaceFunctions;
@@ -177,41 +178,16 @@ public class ClusteringFunctionTest extends AbstractPaceTest {
 	}

 	@Test
-	public void testKeywordsClustering() {
+	public void legalnameClustering() {

-		final ClusteringFunction cf = new KeywordsClustering(params);
-		final String s = "Polytechnic University of Turin";
+		final ClusteringFunction cf = new LegalnameClustering(params);
+		String s = "key::1 key::2 city::1";
 		System.out.println(s);
 		System.out.println(cf.apply(conf, Lists.newArrayList(s)));

-		final String s1 = "POLITECNICO DI TORINO";
-		System.out.println(s1);
-		System.out.println(cf.apply(conf, Lists.newArrayList(s1)));
-
-		final String s2 = "Universita farmaceutica culturale di milano bergamo";
-		System.out.println("s2 = " + s2);
-		System.out.println(cf.apply(conf, Lists.newArrayList(s2)));
-
-		final String s3 = "universita universita milano milano";
-		System.out.println("s3 = " + s3);
-		System.out.println(cf.apply(conf, Lists.newArrayList(s3)));
-
-		final String s4 = "Politechniki Warszawskiej (Warsaw University of Technology)";
-		System.out.println("s4 = " + s4);
-		System.out.println(cf.apply(conf, Lists.newArrayList(s4)));
-
-		final String s5 = "İstanbul Ticarət Universiteti";
-		System.out.println("s5 = " + s5);
-		System.out.println(cf.apply(conf, Lists.newArrayList(s5)));
-
-		final String s6 = "National and Kapodistrian University of Athens";
-		System.out.println("s6 = " + s6);
-		System.out.println(cf.apply(conf, Lists.newArrayList(s6)));
-
-		final String s7 = "Εθνικό και Καποδιστριακό Πανεπιστήμιο Αθηνών";
-		System.out.println("s7 = " + s7);
-		System.out.println(cf.apply(conf, Lists.newArrayList(s7)));
-
+		s = "key::1 key::2 city::1 city::2";
+		System.out.println(s);
+		System.out.println(cf.apply(conf, Lists.newArrayList(s)));
 	}

 	@Test
@@ -54,4 +54,47 @@ public class PaceFunctionTest extends AbstractPaceFunctions {
 		System.out.println("Fixed aliases : " + fixAliases(TEST_STRING));
 	}

+	@Test
+	public void countryInferenceTest() {
+		assertEquals("IT", countryInference("UNKNOWN", "Università di Bologna"));
+		assertEquals("UK", countryInference("UK", "Università di Bologna"));
+		assertEquals("IT", countryInference("UNKNOWN", "Universiteé de Naples"));
+		assertEquals("UNKNOWN", countryInference("UNKNOWN", "Università del Lavoro"));
+	}
+
+	@Test
+	public void cityInferenceTest() {
+		assertEquals("universita city::3181928", cityInference("Università di Bologna"));
+		assertEquals("university city::3170647", cityInference("University of Pisa"));
+		assertEquals("universita", cityInference("Università del lavoro"));
+		assertEquals("universita city::3173331 city::3169522", cityInference("Università di Modena e Reggio Emilia"));
+	}
+
+	@Test
+	public void keywordInferenceTest() {
+		assertEquals("key::41 turin", keywordInference("Polytechnic University of Turin"));
+		assertEquals("key::41 torino", keywordInference("POLITECNICO DI TORINO"));
+		assertEquals(
+			"key::1 key::60 key::81 milano bergamo",
+			keywordInference("Universita farmaceutica culturale di milano bergamo"));
+		assertEquals("key::1 key::1 milano milano", keywordInference("universita universita milano milano"));
+		assertEquals(
+			"key::10 kapodistriako panepistemio athenon",
+			keywordInference("Εθνικό και Καποδιστριακό Πανεπιστήμιο Αθηνών"));
+	}
+
+	@Test
+	public void cityKeywordInferenceTest() {
+		assertEquals("key::41 city::3165524", cityKeywordInference("Polytechnic University of Turin"));
+		assertEquals("key::41 city::3165524", cityKeywordInference("POLITECNICO DI TORINO"));
+		assertEquals(
+			"key::1 key::60 key::81 city::3173435 city::3182164",
+			cityKeywordInference("Universita farmaceutica culturale di milano bergamo"));
+		assertEquals(
+			"key::1 key::1 city::3173435 city::3173435", cityKeywordInference("universita universita milano milano"));
+		assertEquals(
+			"key::10 kapodistriako panepistemio city::264371",
+			cityKeywordInference("Εθνικό και Καποδιστριακό Πανεπιστήμιο Αθηνών"));
+	}
+
 }
@@ -35,6 +35,7 @@ public class ComparatorTest extends AbstractPaceTest {
 		params.put("name_th", "0.95");
 		params.put("jpath_value", "$.value");
 		params.put("jpath_classid", "$.qualifier.classid");
+		params.put("codeRegex", "key::\\d+");
 	}

 	@Test
@@ -44,52 +45,23 @@ public class ComparatorTest extends AbstractPaceTest {
 	}

 	@Test
-	public void cityMatchTest() {
-		final CityMatch cityMatch = new CityMatch(params);
-
-		// both names with no cities
-		assertEquals(1.0, cityMatch.distance("Università", "Centro di ricerca", conf));
-
-		// one of the two names with no cities
-		assertEquals(-1.0, cityMatch.distance("Università di Bologna", "Centro di ricerca", conf));
-
-		// both names with cities (same)
-		assertEquals(1.0, cityMatch.distance("Universita di Bologna", "Biblioteca di Bologna", conf));
-
-		// both names with cities (different)
-		assertEquals(0.0, cityMatch.distance("Universita di Bologna", "Universita di Torino", conf));
-		assertEquals(0.0, cityMatch.distance("Franklin College", "Concordia College", conf));
-
-		// particular cases
-		assertEquals(1.0, cityMatch.distance("Free University of Bozen-Bolzano", "Università di Bolzano", conf));
-		assertEquals(
-			1.0,
-			cityMatch
-				.distance(
-					"Politechniki Warszawskiej (Warsaw University of Technology)", "Warsaw University of Technology",
-					conf));
-
-		// failing becasuse 'Allen' is a transliterrated greek stopword
-		// assertEquals(-1.0, cityMatch.distance("Allen (United States)", "United States Military Academy", conf));
-		assertEquals(-1.0, cityMatch.distance("Washington (United States)", "United States Military Academy", conf));
-	}
-
-	@Test
-	public void keywordMatchTest() {
-		params.put("threshold", "0.5");
-
-		final KeywordMatch keywordMatch = new KeywordMatch(params);
-
-		assertEquals(
-			0.5, keywordMatch.distance("Biblioteca dell'Universita di Bologna", "Università di Bologna", conf));
-		assertEquals(1.0, keywordMatch.distance("Universita degli studi di Pisa", "Universita di Pisa", conf));
-		assertEquals(1.0, keywordMatch.distance("Polytechnic University of Turin", "POLITECNICO DI TORINO", conf));
-		assertEquals(1.0, keywordMatch.distance("Istanbul Commerce University", "İstanbul Ticarət Universiteti", conf));
-		assertEquals(1.0, keywordMatch.distance("Franklin College", "Concordia College", conf));
-		assertEquals(2.0 / 3.0, keywordMatch.distance("University of Georgia", "Georgia State University", conf));
-		assertEquals(0.5, keywordMatch.distance("University College London", "University of London", conf));
-		assertEquals(0.5, keywordMatch.distance("Washington State University", "University of Washington", conf));
-		assertEquals(-1.0, keywordMatch.distance("Allen (United States)", "United States Military Academy", conf));
+	public void codeMatchTest() {
+		CodeMatch codeMatch = new CodeMatch(params);
+
+		// both names with no codes
+		assertEquals(1.0, codeMatch.distance("testing1", "testing2", conf));
+
+		// one of the two names with no codes
+		assertEquals(-1.0, codeMatch.distance("testing1 key::1", "testing", conf));
+
+		// both names with codes (same)
+		assertEquals(1.0, codeMatch.distance("testing1 key::1", "testing2 key::1", conf));
+
+		// both names with codes (different)
+		assertEquals(0.0, codeMatch.distance("testing1 key::1", "testing2 key::2", conf));
+
+		// both names with codes (1 same, 1 different)
+		assertEquals(0.5, codeMatch.distance("key::1 key::2 testing1", "key::1 testing", conf));
 	}

@@ -155,15 +127,15 @@ public class ComparatorTest extends AbstractPaceTest {
 	}

 	@Test
-	public void jaroWinklerNormalizedNameTest() {
+	public void jaroWinklerLegalnameTest() {

-		final JaroWinklerNormalizedName jaroWinklerNormalizedName = new JaroWinklerNormalizedName(params);
+		final JaroWinklerLegalname jaroWinklerLegalname = new JaroWinklerLegalname(params);

-		double result = jaroWinklerNormalizedName
-			.distance("AT&T (United States)", "United States Military Academy", conf);
+		double result = jaroWinklerLegalname
+			.distance("AT&T (United States)", "United States key::2 key::1", conf);
 		System.out.println("result = " + result);

-		result = jaroWinklerNormalizedName.distance("NOAA - Servicio Meteorol\\u00f3gico Nacional", "NOAA - NWS", conf);
+		result = jaroWinklerLegalname.distance("NOAA - Servicio Meteorol\\u00f3gico Nacional", "NOAA - NWS", conf);
 		System.out.println("result = " + result);

 	}
@@ -344,13 +316,13 @@ public class ComparatorTest extends AbstractPaceTest {
 		double result = countryMatch.distance("UNKNOWN", "UNKNOWN", conf);
 		assertEquals(-1.0, result);

-		result = countryMatch.distance("CHILE", "UNKNOWN", conf);
+		result = countryMatch.distance("CL", "UNKNOWN", conf);
 		assertEquals(-1.0, result);

-		result = countryMatch.distance("CHILE", "ITALY", conf);
+		result = countryMatch.distance("CL", "IT", conf);
 		assertEquals(0.0, result);

-		result = countryMatch.distance("CHILE", "CHILE", conf);
+		result = countryMatch.distance("CL", "CL", conf);
 		assertEquals(1.0, result);

 	}
@@ -11,6 +11,7 @@ import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;

 import eu.dnetlib.pace.model.Person;
+import jdk.nashorn.internal.ir.annotations.Ignore;

 public class UtilTest {

@@ -0,0 +1,169 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+		 xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+		 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>eu.dnetlib.dhp</groupId>
+		<artifactId>dhp</artifactId>
+		<version>1.2.5-SNAPSHOT</version>
+		<relativePath>../pom.xml</relativePath>
+
+	</parent>
+
+	<artifactId>dhp-shade-package</artifactId>
+	<packaging>jar</packaging>
+
+	<distributionManagement>
+		<site>
+			<id>DHPSite</id>
+			<url>${dhp.site.stage.path}/dhp-common</url>
+		</site>
+	</distributionManagement>
+
+	<description>This module create a jar of all module dependencies</description>
+
+	<dependencies>
+
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-actionmanager</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+<!--		<dependency>-->
+<!--			<groupId>eu.dnetlib.dhp</groupId>-->
+<!--			<artifactId>dhp-aggregation</artifactId>-->
+<!--			<version>${project.version}</version>-->
+<!--		</dependency>-->
+<!--		<dependency>-->
+<!--			<groupId>eu.dnetlib.dhp</groupId>-->
+<!--			<artifactId>dhp-blacklist</artifactId>-->
+<!--			<version>${project.version}</version>-->
+<!--		</dependency>-->
+<!--		<dependency>-->
+<!--			<groupId>eu.dnetlib.dhp</groupId>-->
+<!--			<artifactId>dhp-broker-events</artifactId>-->
+<!--			<version>${project.version}</version>-->
+<!--		</dependency>-->
+<!--		<dependency>-->
+<!--			<groupId>eu.dnetlib.dhp</groupId>-->
+<!--			<artifactId>dhp-dedup-openaire</artifactId>-->
+<!--			<version>${project.version}</version>-->
+<!--		</dependency>-->
+<!--		<dependency>-->
+<!--			<groupId>eu.dnetlib.dhp</groupId>-->
+<!--			<artifactId>dhp-enrichment</artifactId>-->
+<!--			<version>${project.version}</version>-->
+<!--		</dependency>-->
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-graph-mapper</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-graph-provision</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-impact-indicators</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-stats-actionsets</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-stats-hist-snaps</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-stats-monitor-irish</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-stats-promote</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-stats-update</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-swh</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-usage-raw-data-update</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+		<dependency>
+			<groupId>eu.dnetlib.dhp</groupId>
+			<artifactId>dhp-usage-stats-build</artifactId>
+			<version>${project.version}</version>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-shade-plugin</artifactId>
+				<executions>
+					<execution>
+						<phase>package</phase>
+						<goals>
+							<goal>shade</goal>
+						</goals>
+						<configuration>
+							<transformers>
+								<transformer
+										implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+									<mainClass>eu.dnetlib.dhp.oa.dedup.SparkCreateSimRels</mainClass>
+								</transformer>
+								<!-- This is needed if you have dependencies that use Service Loader. Most Google Cloud client libraries do. -->
+								<transformer
+										implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+								<transformer
+										implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
+									<resource>META-INF/cxf/bus-extensions.txt</resource>
+								</transformer>
+							</transformers>
+							<filters>
+								<filter>
+									<artifact>*:*</artifact>
+									<excludes>
+										<exclude>META-INF/maven/**</exclude>
+										<exclude>META-INF/*.SF</exclude>
+										<exclude>META-INF/*.DSA</exclude>
+										<exclude>META-INF/*.RSA</exclude>
+									</excludes>
+								</filter>
+							</filters>
+							<relocations>
+								<relocation>
+									<pattern>com</pattern>
+									<shadedPattern>repackaged.com.google.common</shadedPattern>
+									<includes>
+										<include>com.google.common.**</include>
+									</includes>
+								</relocation>
+							</relocations>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+
+</project>
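Note on the shade configuration above: the manifest main class is pinned to eu.dnetlib.dhp.oa.dedup.SparkCreateSimRels, and the relocation block rewrites only com.google.common.** into repackaged.com.google.common (the <include> narrows the broad `com` pattern), the usual way to keep the bundled Guava from clashing with the older Guava shipped on Hadoop clusters. The META-INF/*.SF, *.DSA and *.RSA entries are excluded so the merged jar does not fail signature verification at runtime.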
@@ -51,48 +51,5 @@
 			<artifactId>hadoop-distcp</artifactId>
 		</dependency>

-		<dependency>
-			<groupId>eu.dnetlib</groupId>
-			<artifactId>dnet-actionmanager-api</artifactId>
-		</dependency>
-		<dependency>
-			<groupId>eu.dnetlib</groupId>
-			<artifactId>dnet-actionmanager-common</artifactId>
-			<exclusions>
-				<exclusion>
-					<groupId>eu.dnetlib</groupId>
-					<artifactId>dnet-openaireplus-mapping-utils</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>saxonica</groupId>
-					<artifactId>saxon</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>saxonica</groupId>
-					<artifactId>saxon-dom</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>jgrapht</groupId>
-					<artifactId>jgrapht</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>net.sf.ehcache</groupId>
-					<artifactId>ehcache</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.springframework</groupId>
-					<artifactId>spring-test</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>org.apache.*</groupId>
-					<artifactId>*</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>apache</groupId>
-					<artifactId>*</artifactId>
-				</exclusion>
-			</exclusions>
-		</dependency>

 	</dependencies>
 </project>
@@ -4,7 +4,6 @@ package eu.dnetlib.dhp.actionmanager;
 import java.io.Serializable;
 import java.io.StringReader;
 import java.util.List;
-import java.util.NoSuchElementException;
 import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
@@ -22,7 +21,6 @@ import com.google.common.base.Splitter;
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Sets;

-import eu.dnetlib.actionmanager.rmi.ActionManagerException;
 import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
@@ -65,7 +63,7 @@ public class ISClient implements Serializable {
 					.map(t -> buildDirectory(basePath, t))
 					.collect(Collectors.toList()))
 				.orElseThrow(() -> new IllegalStateException("empty set list"));
-		} catch (ActionManagerException | ISLookUpException e) {
+		} catch (ISLookUpException e) {
 			throw new IllegalStateException("unable to query ActionSets info from the IS");
 		}
 	}
@@ -89,31 +87,18 @@ public class ISClient implements Serializable {
 		return Joiner.on("/").join(basePath, t.getMiddle(), t.getRight());
 	}

-	private String getBasePathHDFS(ISLookUpService isLookup) throws ActionManagerException {
+	private String getBasePathHDFS(ISLookUpService isLookup) throws ISLookUpException {
 		return queryServiceProperty(isLookup, "basePath");
 	}

 	private String queryServiceProperty(ISLookUpService isLookup, final String propertyName)
-		throws ActionManagerException {
+		throws ISLookUpException {
 		final String q = "for $x in /RESOURCE_PROFILE[.//RESOURCE_TYPE/@value='ActionManagerServiceResourceType'] return $x//SERVICE_PROPERTIES/PROPERTY[./@ key='"
 			+ propertyName
 			+ "']/@value/string()";
 		log.debug("quering for service property: {}", q);
-		try {
 			final List<String> value = isLookup.quickSearchProfile(q);
 			return Iterables.getOnlyElement(value);
-		} catch (ISLookUpException e) {
-			String msg = "Error accessing service profile, using query: " + q;
-			log.error(msg, e);
-			throw new ActionManagerException(msg, e);
-		} catch (NoSuchElementException e) {
-			String msg = "missing service property: " + propertyName;
-			log.error(msg, e);
-			throw new ActionManagerException(msg, e);
-		} catch (IllegalArgumentException e) {
-			String msg = "found more than one service property: " + propertyName;
-			log.error(msg, e);
-			throw new ActionManagerException(msg, e);
-		}
 	}
 }
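Note on this hunk: with the try/catch removed, Guava's getOnlyElement now surfaces its unchecked exceptions directly to callers, where the old code translated them (with a log line) into the checked ActionManagerException. A minimal sketch of the behavior callers now see; this demo is not part of the patch:

import java.util.Arrays;
import java.util.Collections;

import com.google.common.collect.Iterables;

class GetOnlyElementDemo {
	public static void main(String[] args) {
		// exactly one element: returned as-is
		System.out.println(Iterables.getOnlyElement(Arrays.asList("basePath")));
		// empty iterable -> NoSuchElementException; two elements -> IllegalArgumentException.
		// Both are unchecked, so they now propagate out of queryServiceProperty untranslated.
		try {
			Iterables.getOnlyElement(Collections.emptyList());
		} catch (java.util.NoSuchElementException e) {
			System.out.println("missing property now surfaces as NoSuchElementException");
		}
	}
}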
@@ -10,7 +10,6 @@ import java.util.List;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.BZip2Codec;
-import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaPairRDD;
@@ -10,6 +10,7 @@ import java.util.stream.Collectors;

 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.BZip2Codec;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.spark.SparkConf;
@@ -83,7 +84,7 @@ public class SparkAtomicActionScoreJob implements Serializable {
 				resultsRDD
 					.union(projectsRDD)
 					.saveAsHadoopFile(
-						outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, GzipCodec.class);
+						outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, BZip2Codec.class);
 			});
 	}

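Note: this and several hunks below consistently switch the action-set writers from GzipCodec to BZip2Codec. The commit does not state the motivation; bzip2 typically trades CPU time for a better compression ratio, and unlike gzip it is a splittable format when the output is a plain file rather than a sequence-file container.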
@@ -6,26 +6,23 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

 import java.io.Serializable;
 import java.util.ArrayList;
+import java.util.Iterator;
 import java.util.List;
+import java.util.Optional;

 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.api.java.function.MapGroupsFunction;
-import org.apache.spark.sql.Dataset;
-import org.apache.spark.sql.Encoders;
-import org.apache.spark.sql.SaveMode;
-import org.apache.spark.sql.SparkSession;
+import org.apache.spark.sql.*;
+import org.jetbrains.annotations.NotNull;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import eu.dnetlib.dhp.actionmanager.createunresolvedentities.model.SDGDataModel;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
 import eu.dnetlib.dhp.schema.oaf.Subject;
-import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
 import eu.dnetlib.dhp.utils.DHPUtils;

 public class PrepareSDGSparkJob implements Serializable {
@@ -52,42 +49,91 @@ public class PrepareSDGSparkJob implements Serializable {
 		final String outputPath = parser.get("outputPath");
 		log.info("outputPath: {}", outputPath);

+		final Boolean distributeDOI = Optional
+			.ofNullable(parser.get("distributeDoi"))
+			.map(Boolean::valueOf)
+			.orElse(Boolean.TRUE);
+
+		log.info("distribute doi {}", distributeDOI);
+
 		SparkConf conf = new SparkConf();
 		runWithSparkSession(
 			conf,
 			isSparkSessionManaged,
 			spark -> {
+				if (distributeDOI)
 					doPrepare(
 						spark,
 						sourcePath,
 						outputPath);
+				else
+					doPrepareoaid(spark, sourcePath, outputPath);
+
 			});
 	}

 	private static void doPrepare(SparkSession spark, String sourcePath, String outputPath) {
-		Dataset<SDGDataModel> sdgDataset = readPath(spark, sourcePath, SDGDataModel.class);
+		Dataset<Row> sdgDataset = spark
+			.read()
+			.format("csv")
+			.option("sep", DEFAULT_DELIMITER)
+			.option("inferSchema", "true")
+			.option("header", "true")
+			.option("quotes", "\"")
+			.load(sourcePath);
+
 		sdgDataset
-			.groupByKey((MapFunction<SDGDataModel, String>) r -> r.getDoi().toLowerCase(), Encoders.STRING())
-			.mapGroups((MapGroupsFunction<String, SDGDataModel, Result>) (k, it) -> {
-				Result r = new Result();
-				r.setId(DHPUtils.generateUnresolvedIdentifier(k, DOI));
-				SDGDataModel first = it.next();
-				List<Subject> sbjs = new ArrayList<>();
-				sbjs.add(getSubject(first.getSbj(), SDG_CLASS_ID, SDG_CLASS_NAME, UPDATE_SUBJECT_SDG_CLASS_ID));
-				it
-					.forEachRemaining(
-						s -> sbjs
-							.add(getSubject(s.getSbj(), SDG_CLASS_ID, SDG_CLASS_NAME, UPDATE_SUBJECT_SDG_CLASS_ID)));
-				r.setSubject(sbjs);
-				return r;
-			}, Encoders.bean(Result.class))
+			.groupByKey((MapFunction<Row, String>) v -> ((String) v.getAs("doi")).toLowerCase(), Encoders.STRING())
+			.mapGroups(
+				(MapGroupsFunction<String, Row, Result>) (k,
+					it) -> getResult(
+						DHPUtils
+							.generateUnresolvedIdentifier(
+								ModelSupport.entityIdPrefix.get(Result.class.getSimpleName().toLowerCase()) + "|" + k,
+								DOI),
+						it),
+				Encoders.bean(Result.class))
 			.write()
 			.mode(SaveMode.Overwrite)
 			.option("compression", "gzip")
 			.json(outputPath + "/sdg");
 	}

+	private static void doPrepareoaid(SparkSession spark, String sourcePath, String outputPath) {
+		Dataset<Row> sdgDataset = spark
+			.read()
+			.format("csv")
+			.option("sep", DEFAULT_DELIMITER)
+			.option("inferSchema", "true")
+			.option("header", "true")
+			.option("quotes", "\"")
+			.load(sourcePath);
+		;
+
+		sdgDataset
+			.groupByKey((MapFunction<Row, String>) r -> "50|" + ((String) r.getAs("oaid")), Encoders.STRING())
+			.mapGroups(
+				(MapGroupsFunction<String, Row, Result>) PrepareSDGSparkJob::getResult, Encoders.bean(Result.class))
+			.write()
+			.mode(SaveMode.Overwrite)
+			.option("compression", "gzip")
+			.json(outputPath + "/sdg");
+	}
+
+	private static @NotNull Result getResult(String id, Iterator<Row> it) {
+		Result r = new Result();
+		r.setId(id);
+		Row first = it.next();
+		List<Subject> sbjs = new ArrayList<>();
+		sbjs.add(getSubject(first.getAs("sdg"), SDG_CLASS_ID, SDG_CLASS_NAME, UPDATE_SUBJECT_SDG_CLASS_ID));
+		it
+			.forEachRemaining(
+				s -> sbjs
+					.add(getSubject(s.getAs("sdg"), SDG_CLASS_ID, SDG_CLASS_NAME, UPDATE_SUBJECT_SDG_CLASS_ID)));
+		r.setSubject(sbjs);
+
+		return r;
+	}
+
 }
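The reworked doPrepare reads the SDG CSV as untyped Rows instead of binding it to SDGDataModel, groups case-insensitively by DOI, and factors the per-group merge into getResult so that doPrepareoaid can reuse it for OpenAIRE ids. (Incidentally, Spark's CSV reader expects the option key `quote`; the `quotes` key used here appears to be silently ignored.) A self-contained sketch of the same groupByKey/mapGroups pattern follows; the dummy data is hypothetical and the column names mirror the patch:

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.api.java.function.MapGroupsFunction;
import org.apache.spark.sql.*;

public class GroupByDoiSketch {
	public static void main(String[] args) {
		SparkSession spark = SparkSession.builder().master("local[*]").appName("sketch").getOrCreate();
		Dataset<Row> rows = spark
			.createDataFrame(
				java.util.Arrays.asList(
					RowFactory.create("10.1/A", "sdg1"),
					RowFactory.create("10.1/a", "sdg2")), // same DOI, different case
				new org.apache.spark.sql.types.StructType()
					.add("doi", "string")
					.add("sdg", "string"));
		Dataset<String> merged = rows
			// the lowercased DOI is the grouping key, as in doPrepare
			.groupByKey((MapFunction<Row, String>) r -> ((String) r.getAs("doi")).toLowerCase(), Encoders.STRING())
			// one output record per DOI, concatenating all SDG subjects of the group
			.mapGroups((MapGroupsFunction<String, Row, String>) (doi, it) -> {
				StringBuilder sb = new StringBuilder(doi).append(" ->");
				it.forEachRemaining(r -> sb.append(' ').append((String) r.getAs("sdg")));
				return sb.toString();
			}, Encoders.STRING());
		merged.show(false);
		spark.stop();
	}
}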
@@ -13,9 +13,6 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.JavaPairRDD;
-import org.apache.spark.api.java.function.FilterFunction;
-import org.apache.spark.api.java.function.FlatMapFunction;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Encoders;
 import org.apache.spark.sql.SparkSession;
@@ -24,13 +21,9 @@ import org.slf4j.LoggerFactory;

 import com.fasterxml.jackson.databind.ObjectMapper;

-import eu.dnetlib.dhp.actionmanager.opencitations.model.COCI;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.schema.action.AtomicAction;
-import eu.dnetlib.dhp.schema.common.ModelConstants;
-import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.Result;
-import eu.dnetlib.dhp.schema.oaf.utils.*;
 import scala.Tuple2;

 public class CreateActionSetSparkJob implements Serializable {
@@ -0,0 +1,80 @@
+
+package eu.dnetlib.dhp.actionmanager.personentity;
+
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+
+import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
+import eu.dnetlib.dhp.schema.oaf.Person;
+import eu.dnetlib.dhp.schema.oaf.Relation;
+import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
+import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
+import eu.dnetlib.dhp.utils.DHPUtils;
+
+public class CoAuthorshipIterator implements Iterator<Relation> {
+	private int firstIndex;
+	private int secondIndex;
+	private boolean firstRelation;
+	private List<String> authors;
+	private static final String PERSON_PREFIX = ModelSupport.getIdPrefix(Person.class) + "|orcid_______::";
+	private static final String OPENAIRE_PREFIX = "openaire____";
+	private static final String SEPARATOR = "::";
+	private static final String ORCID_KEY = "10|" + OPENAIRE_PREFIX + SEPARATOR
+		+ DHPUtils.md5(ModelConstants.ORCID.toLowerCase());
+	public static final String ORCID_AUTHORS_CLASSID = "sysimport:crosswalk:orcid";
+	public static final String ORCID_AUTHORS_CLASSNAME = "Imported from ORCID";
+
+	@Override
+	public boolean hasNext() {
+		return firstIndex < authors.size() - 1;
+	}
+
+	@Override
+	public Relation next() {
+		Relation rel = null;
+		if (firstRelation) {
+			rel = getRelation(authors.get(firstIndex), authors.get(secondIndex));
+			firstRelation = Boolean.FALSE;
+		} else {
+			rel = getRelation(authors.get(secondIndex), authors.get(firstIndex));
+			firstRelation = Boolean.TRUE;
+			secondIndex += 1;
+			if (secondIndex >= authors.size()) {
+				firstIndex += 1;
+				secondIndex = firstIndex + 1;
+			}
+		}
+
+		return rel;
+	}
+
+	public CoAuthorshipIterator(List<String> authors) {
+		this.authors = authors;
+		this.firstIndex = 0;
+		this.secondIndex = 1;
+		this.firstRelation = Boolean.TRUE;
+	}
+
+	private Relation getRelation(String orcid1, String orcid2) {
+		String source = PERSON_PREFIX + IdentifierFactory.md5(orcid1);
+		String target = PERSON_PREFIX + IdentifierFactory.md5(orcid2);
+		return OafMapperUtils
+			.getRelation(
+				source, target, ModelConstants.PERSON_PERSON_RELTYPE,
+				ModelConstants.PERSON_PERSON_SUBRELTYPE,
+				ModelConstants.PERSON_PERSON_HASCOAUTHORED,
+				Arrays.asList(OafMapperUtils.keyValue(ORCID_KEY, ModelConstants.ORCID_DS)),
+				OafMapperUtils
+					.dataInfo(
+						false, null, false, false,
+						OafMapperUtils
+							.qualifier(
+								ORCID_AUTHORS_CLASSID, ORCID_AUTHORS_CLASSNAME,
+								ModelConstants.DNET_PROVENANCE_ACTIONS, ModelConstants.DNET_PROVENANCE_ACTIONS),
+						"0.91"),
+				null);
+	}
+}
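CoAuthorshipIterator walks every unordered pair {i, j} of the author list and emits two directed HasCoAuthored relations per pair (i to j on one call, j to i on the next), so n authors yield n*(n-1) relations. A quick sanity check of that enumeration; this is a sketch, not part of the patch, and assumes the dhp-schemas classes used by getRelation are on the classpath:

package eu.dnetlib.dhp.actionmanager.personentity;

import java.util.Arrays;
import java.util.List;

class CoAuthorshipIteratorDemo {
	public static void main(String[] args) {
		List<String> orcids = Arrays.asList("0000-0001", "0000-0002", "0000-0003");
		CoAuthorshipIterator it = new CoAuthorshipIterator(orcids);
		int count = 0;
		while (it.hasNext()) {
			it.next(); // alternates i->j and j->i for each unordered pair {i, j}
			count++;
		}
		System.out.println(count); // expected: 3 * 2 = 6
	}
}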
@@ -0,0 +1,20 @@
+
+package eu.dnetlib.dhp.actionmanager.personentity;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import eu.dnetlib.dhp.schema.oaf.Relation;
+
+public class Coauthors implements Serializable {
+	private List<String> coauthors;
+
+	public List<String> getCoauthors() {
+		return coauthors;
+	}
+
+	public void setCoauthors(List<String> coauthors) {
+		this.coauthors = coauthors;
+	}
+}
@@ -0,0 +1,40 @@
+
+package eu.dnetlib.dhp.actionmanager.personentity;
+
+import java.io.Serializable;
+
+import eu.dnetlib.dhp.schema.oaf.Person;
+import eu.dnetlib.dhp.schema.oaf.Relation;
+import scala.Tuple2;
+
+public class Couples implements Serializable {
+	Person p;
+	Relation r;
+
+	public Couples() {
+
+	}
+
+	public Person getP() {
+		return p;
+	}
+
+	public void setP(Person p) {
+		this.p = p;
+	}
+
+	public Relation getR() {
+		return r;
+	}
+
+	public void setR(Relation r) {
+		this.r = r;
+	}
+
+	public static <Tuples> Couples newInstance(Tuple2<Person, Relation> couple) {
+		Couples c = new Couples();
+		c.p = couple._1();
+		c.r = couple._2();
+		return c;
+	}
+}
@@ -0,0 +1,437 @@
+
+package eu.dnetlib.dhp.actionmanager.personentity;
+
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import static org.apache.spark.sql.functions.*;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.*;
+import java.util.stream.Collectors;
+
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.BZip2Codec;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.*;
+import org.apache.spark.sql.*;
+import org.jetbrains.annotations.NotNull;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.spark_project.jetty.util.StringUtil;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.collection.orcid.model.Author;
+import eu.dnetlib.dhp.collection.orcid.model.Employment;
+import eu.dnetlib.dhp.collection.orcid.model.Work;
+import eu.dnetlib.dhp.common.HdfsSupport;
+import eu.dnetlib.dhp.schema.action.AtomicAction;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.common.ModelSupport;
+import eu.dnetlib.dhp.schema.oaf.KeyValue;
+import eu.dnetlib.dhp.schema.oaf.Person;
+import eu.dnetlib.dhp.schema.oaf.Relation;
+import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
+import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
+import eu.dnetlib.dhp.schema.oaf.utils.PidCleaner;
+import eu.dnetlib.dhp.schema.oaf.utils.PidType;
+import eu.dnetlib.dhp.utils.DHPUtils;
+import scala.Tuple2;
+
+public class ExtractPerson implements Serializable {
+	private static final Logger log = LoggerFactory.getLogger(ExtractPerson.class);
+
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+	private static final String OPENAIRE_PREFIX = "openaire____";
+	private static final String SEPARATOR = "::";
+	private static final String orcidKey = "10|" + OPENAIRE_PREFIX + SEPARATOR
+		+ DHPUtils.md5(ModelConstants.ORCID.toLowerCase());
+
+	private static final String DOI_PREFIX = "50|doi_________::";
+
+	private static final String PMID_PREFIX = "50|pmid________::";
+	private static final String ARXIV_PREFIX = "50|arXiv_______::";
+
+	private static final String PMCID_PREFIX = "50|pmcid_______::";
+	private static final String ROR_PREFIX = "20|ror_________::";
+	private static final String PERSON_PREFIX = ModelSupport.getIdPrefix(Person.class) + "|orcid_______";
+	public static final String ORCID_AUTHORS_CLASSID = "sysimport:crosswalk:orcid";
+	public static final String ORCID_AUTHORS_CLASSNAME = "Imported from ORCID";
+
+	public static void main(final String[] args) throws IOException, ParseException {
+
+		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+			IOUtils
+				.toString(
+					Objects
+						.requireNonNull(
+							ExtractPerson.class
+								.getResourceAsStream(
+									"/eu/dnetlib/dhp/actionmanager/personentity/as_parameters.json"))));
+
+		parser.parseArgument(args);
+
+		Boolean isSparkSessionManaged = Optional
+			.ofNullable(parser.get("isSparkSessionManaged"))
+			.map(Boolean::valueOf)
+			.orElse(Boolean.TRUE);
+
+		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
+
+		final String inputPath = parser.get("inputPath");
+		log.info("inputPath {}", inputPath);
+
+		final String outputPath = parser.get("outputPath");
+		log.info("outputPath {}", outputPath);
+
+		final String workingDir = parser.get("workingDir");
+		log.info("workingDir {}", workingDir);
+
+		SparkConf conf = new SparkConf();
+		runWithSparkSession(
+			conf,
+			isSparkSessionManaged,
+			spark -> {
+				HdfsSupport.remove(outputPath, spark.sparkContext().hadoopConfiguration());
+				createActionSet(spark, inputPath, outputPath, workingDir);
+			});
+
+	}
+
+	private static void createActionSet(SparkSession spark, String inputPath, String outputPath, String workingDir) {
+
+		Dataset<Author> authors = spark
+			.read()
+			.parquet(inputPath + "Authors")
+			.as(Encoders.bean(Author.class));
+
+		Dataset<Work> works = spark
+			.read()
+			.parquet(inputPath + "Works")
+			.as(Encoders.bean(Work.class))
+			.filter(
+				(FilterFunction<Work>) w -> Optional.ofNullable(w.getPids()).isPresent() &&
+					w
+						.getPids()
+						.stream()
+						.anyMatch(
+							p -> p.getSchema().equalsIgnoreCase("doi") ||
+								p.getSchema().equalsIgnoreCase("pmc") ||
+								p.getSchema().equalsIgnoreCase("pmid") ||
+								p.getSchema().equalsIgnoreCase("arxiv")));
+
+		Dataset<Employment> employmentDataset = spark
+			.read()
+			.parquet(inputPath + "Employments")
+			.as(Encoders.bean(Employment.class));
+
+		Dataset<Author> peopleToMap = authors
+			.joinWith(works, authors.col("orcid").equalTo(works.col("orcid")))
+			.map((MapFunction<Tuple2<Author, Work>, Author>) t2 -> t2._1(), Encoders.bean(Author.class))
+			.groupByKey((MapFunction<Author, String>) a -> a.getOrcid(), Encoders.STRING())
+			.mapGroups((MapGroupsFunction<String, Author, Author>) (k, it) -> it.next(), Encoders.bean(Author.class));
+
+		Dataset<Employment> employment = employmentDataset
+			.joinWith(peopleToMap, employmentDataset.col("orcid").equalTo(peopleToMap.col("orcid")))
+			.map((MapFunction<Tuple2<Employment, Author>, Employment>) t2 -> t2._1(), Encoders.bean(Employment.class));
+
+		Dataset<Person> people;
+		peopleToMap.map((MapFunction<Author, Person>) op -> {
+			Person person = new Person();
+			person.setId(DHPUtils.generateIdentifier(op.getOrcid(), PERSON_PREFIX));
+			person
+				.setBiography(
+					Optional
+						.ofNullable(op.getBiography())
+
+						.orElse(""));
+			KeyValue kv = OafMapperUtils.keyValue(orcidKey, ModelConstants.ORCID_DS);
+			kv.setDataInfo(null);
+			person.setCollectedfrom(Arrays.asList(kv));
+			person
+				.setAlternativeNames(
+					Optional
+						.ofNullable(op.getOtherNames())
+
+						.orElse(new ArrayList<>()));
+			person
+				.setFamilyName(
+					Optional
+						.ofNullable(op.getFamilyName())
+
+						.orElse(""));
+			person
+				.setGivenName(
+					Optional
+						.ofNullable(op.getGivenName())
+
+						.orElse(""));
+			person
+				.setPid(
+					Optional
+						.ofNullable(op.getOtherPids())
+						.map(
+							v -> v
+								.stream()
+								.map(
+									p -> OafMapperUtils
+										.structuredProperty(
+											p.getValue(), p.getSchema(), p.getSchema(), ModelConstants.DNET_PID_TYPES,
+											ModelConstants.DNET_PID_TYPES, null))
+								.collect(Collectors.toList()))
+						.orElse(new ArrayList<>()));
+			person
+				.getPid()
+				.add(
+					OafMapperUtils
+						.structuredProperty(
+							op.getOrcid(), ModelConstants.ORCID, ModelConstants.ORCID_CLASSNAME,
+							ModelConstants.DNET_PID_TYPES, ModelConstants.DNET_PID_TYPES, null));
+			person.setDateofcollection(op.getLastModifiedDate());
+			person.setOriginalId(Arrays.asList(op.getOrcid()));
+			return person;
+		}, Encoders.bean(Person.class))
+			.write()
+			.option("compression", "gzip")
+			.mode(SaveMode.Overwrite)
+			.json(workingDir + "/people");
+
+		works
+			.flatMap(
+				(FlatMapFunction<Work, Relation>) ExtractPerson::getAuthorshipRelationIterator,
+				Encoders.bean(Relation.class))
+			.write()
+			.option("compression", "gzip")
+			.mode(SaveMode.Overwrite)
+			.json(workingDir + "/authorship");
+
+		Dataset<Relation> coauthorship = works
+			.flatMap((FlatMapFunction<Work, Tuple2<String, String>>) w -> {
+				List<Tuple2<String, String>> lista = new ArrayList<>();
+				w.getPids().stream().forEach(p -> {
+					if (p.getSchema().equalsIgnoreCase("doi") || p.getSchema().equalsIgnoreCase("pmc")
+						|| p.getSchema().equalsIgnoreCase("pmid") || p.getSchema().equalsIgnoreCase("arxiv"))
+						lista.add(new Tuple2<>(p.getValue(), w.getOrcid()));
+				});
+				return lista.iterator();
+			}, Encoders.tuple(Encoders.STRING(), Encoders.STRING()))
+			.groupByKey((MapFunction<Tuple2<String, String>, String>) Tuple2::_1, Encoders.STRING())
+			.mapGroups(
+				(MapGroupsFunction<String, Tuple2<String, String>, Coauthors>) (k, it) -> extractCoAuthors(it),
+				Encoders.bean(Coauthors.class))
+			.flatMap(
+				(FlatMapFunction<Coauthors, Relation>) c -> new CoAuthorshipIterator(c.getCoauthors()),
+				Encoders.bean(Relation.class))
+			.groupByKey((MapFunction<Relation, String>) r -> r.getSource() + r.getTarget(), Encoders.STRING())
+			.mapGroups(
+				(MapGroupsFunction<String, Relation, Relation>) (k, it) -> it.next(), Encoders.bean(Relation.class));
+
+		coauthorship
+			.write()
+			.option("compression", "gzip")
+			.mode(SaveMode.Overwrite)
+			.json(workingDir + "/coauthorship");
+
+		employment
+			.filter((FilterFunction<Employment>) e -> Optional.ofNullable(e.getAffiliationId()).isPresent())
+			.filter((FilterFunction<Employment>) e -> e.getAffiliationId().getSchema().equalsIgnoreCase("ror"))
+			.map(
+				(MapFunction<Employment, Relation>) ExtractPerson::getAffiliationRelation,
+				Encoders.bean(Relation.class))
+			.write()
+			.option("compression", "gzip")
+			.mode(SaveMode.Overwrite)
+			.json(workingDir + "/affiliation");
+
+		people = spark
+			.read()
+			.textFile(workingDir + "/people")
+			.map(
+				(MapFunction<String, Person>) value -> OBJECT_MAPPER
+					.readValue(value, Person.class),
+				Encoders.bean(Person.class));
+
+		people.show(false);
+		people
+			.toJavaRDD()
+			.map(p -> new AtomicAction(p.getClass(), p))
+			.union(
+				getRelations(spark, workingDir + "/authorship").toJavaRDD().map(r -> new AtomicAction(r.getClass(), r)))
+			.union(
+				getRelations(spark, workingDir + "/coauthorship")
+					.toJavaRDD()
+					.map(r -> new AtomicAction(r.getClass(), r)))
+			.union(
+				getRelations(spark, workingDir + "/affiliation")
+					.toJavaRDD()
+					.map(r -> new AtomicAction(r.getClass(), r)))
+			.mapToPair(
+				aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
+					new Text(OBJECT_MAPPER.writeValueAsString(aa))))
+			.saveAsHadoopFile(
+				outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, BZip2Codec.class);
+	}
+
+	private static Dataset<Relation> getRelations(SparkSession spark, String path) {
+		return spark
+			.read()
+			.textFile(path)
+			.map(
+				(MapFunction<String, Relation>) value -> OBJECT_MAPPER
+					.readValue(value, Relation.class),
+				Encoders.bean(Relation.class));// spark.read().json(path).as(Encoders.bean(Relation.class));
+	}
+
+	private static Coauthors extractCoAuthors(Iterator<Tuple2<String, String>> it) {
+		Coauthors coauth = new Coauthors();
+		List<String> coauthors = new ArrayList<>();
+		while (it.hasNext())
+			coauthors.add(it.next()._2());
+		coauth.setCoauthors(coauthors);
+
+		return coauth;
+	}
+
+	private static Relation getAffiliationRelation(Employment row) {
+		String source = PERSON_PREFIX + IdentifierFactory.md5(row.getOrcid());
+		String target = ROR_PREFIX
+			+ IdentifierFactory.md5(PidCleaner.normalizePidValue("ROR", row.getAffiliationId().getValue()));
+		List<KeyValue> properties = new ArrayList<>();
+
+		Relation relation = OafMapperUtils
+			.getRelation(
+				source, target, ModelConstants.ORG_PERSON_RELTYPE, ModelConstants.ORG_PERSON_SUBRELTYPE,
+				ModelConstants.ORG_PERSON_PARTICIPATES,
+				Arrays.asList(OafMapperUtils.keyValue(orcidKey, ModelConstants.ORCID_DS)),
+				OafMapperUtils
+					.dataInfo(
+						false, null, false, false,
+						OafMapperUtils
+							.qualifier(
+								ORCID_AUTHORS_CLASSID, ORCID_AUTHORS_CLASSNAME, ModelConstants.DNET_PROVENANCE_ACTIONS,
+								ModelConstants.DNET_PROVENANCE_ACTIONS),
+						"0.91"),
+				null);
+
+		if (Optional.ofNullable(row.getStartDate()).isPresent() && StringUtil.isNotBlank(row.getStartDate())) {
+			KeyValue kv = new KeyValue();
+			kv.setKey("startDate");
+			kv.setValue(row.getStartDate());
+			properties.add(kv);
+		}
+		if (Optional.ofNullable(row.getEndDate()).isPresent() && StringUtil.isNotBlank(row.getEndDate())) {
+			KeyValue kv = new KeyValue();
+			kv.setKey("endDate");
+			kv.setValue(row.getEndDate());
+			properties.add(kv);
+		}
+
+		if (properties.size() > 0)
+			relation.setProperties(properties);
+		return relation;
+
+	}
+
+	private static Collection<? extends Relation> getCoAuthorshipRelations(String orcid1, String orcid2) {
+		String source = PERSON_PREFIX + "::" + IdentifierFactory.md5(orcid1);
+		String target = PERSON_PREFIX + "::" + IdentifierFactory.md5(orcid2);
+
+		return Arrays
+			.asList(
+				OafMapperUtils
+					.getRelation(
+						source, target, ModelConstants.PERSON_PERSON_RELTYPE,
+						ModelConstants.PERSON_PERSON_SUBRELTYPE,
+						ModelConstants.PERSON_PERSON_HASCOAUTHORED,
+						Arrays.asList(OafMapperUtils.keyValue(orcidKey, ModelConstants.ORCID_DS)),
+						OafMapperUtils
+							.dataInfo(
+								false, null, false, false,
+								OafMapperUtils
+									.qualifier(
+										ORCID_AUTHORS_CLASSID, ORCID_AUTHORS_CLASSNAME,
+										ModelConstants.DNET_PROVENANCE_ACTIONS, ModelConstants.DNET_PROVENANCE_ACTIONS),
+								"0.91"),
+						null),
+				OafMapperUtils
+					.getRelation(
+						target, source, ModelConstants.PERSON_PERSON_RELTYPE,
+						ModelConstants.PERSON_PERSON_SUBRELTYPE,
+						ModelConstants.PERSON_PERSON_HASCOAUTHORED,
+						Arrays.asList(OafMapperUtils.keyValue(orcidKey, ModelConstants.ORCID_DS)),
+						OafMapperUtils
+							.dataInfo(
+								false, null, false, false,
+								OafMapperUtils
+									.qualifier(
+										ORCID_AUTHORS_CLASSID, ORCID_AUTHORS_CLASSNAME,
+										ModelConstants.DNET_PROVENANCE_ACTIONS, ModelConstants.DNET_PROVENANCE_ACTIONS),
+								"0.91"),
+						null));
+
+	}
+
+	private static @NotNull Iterator<Relation> getAuthorshipRelationIterator(Work w) {
+
+		if (Optional.ofNullable(w.getPids()).isPresent())
+			return w
+				.getPids()
+				.stream()
+				.map(pid -> getRelation(w.getOrcid(), pid))
+				.filter(Objects::nonNull)
+				.collect(Collectors.toList())
+				.iterator();
+		List<Relation> ret = new ArrayList<>();
+		return ret.iterator();
+	}
+
+	private static Relation getRelation(String orcid, eu.dnetlib.dhp.collection.orcid.model.Pid pid) {
+		String target;
+		String source = PERSON_PREFIX + "::" + IdentifierFactory.md5(orcid);
+		switch (pid.getSchema()) {
+			case "doi":
+				target = DOI_PREFIX
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.doi.toString(), pid.getValue()));
+				break;
+			case "pmid":
+				target = PMID_PREFIX
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.pmid.toString(), pid.getValue()));
+				break;
+			case "arxiv":
+				target = ARXIV_PREFIX
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.arXiv.toString(), pid.getValue()));
+				break;
+			case "pmcid":
+				target = PMCID_PREFIX
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.pmc.toString(), pid.getValue()));
+				break;
+
+			default:
+				return null;
+		}
+
+		return OafMapperUtils
+			.getRelation(
+				source, target, ModelConstants.RESULT_PERSON_RELTYPE,
+				ModelConstants.RESULT_PERSON_SUBRELTYPE,
+				ModelConstants.RESULT_PERSON_HASAUTHORED,
+				Arrays.asList(OafMapperUtils.keyValue(orcidKey, ModelConstants.ORCID_DS)),
+				OafMapperUtils
+					.dataInfo(
+						false, null, false, false,
+						OafMapperUtils
+							.qualifier(
+								ORCID_AUTHORS_CLASSID, ORCID_AUTHORS_CLASSNAME, ModelConstants.DNET_PROVENANCE_ACTIONS,
+								ModelConstants.DNET_PROVENANCE_ACTIONS),
+						"0.91"),
+				null);
+	}
+}
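The job materializes people, authorship, coauthorship and affiliation under workingDir as gzipped JSON, then re-reads them and packs everything into one action set: each payload is wrapped in an AtomicAction and saved as a SequenceFile of (class name, JSON) Text pairs. A minimal sketch of that serialization convention; it assumes dhp-schemas and Jackson on the classpath, and the Relation values are illustrative placeholders:

import org.apache.hadoop.io.Text;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.schema.action.AtomicAction;
import eu.dnetlib.dhp.schema.oaf.Relation;

class AtomicActionPairDemo {
	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		Relation r = new Relation();
		r.setSource("30|orcid_______::...");  // placeholder ids, not real hashes
		r.setTarget("50|doi_________::...");
		AtomicAction<Relation> aa = new AtomicAction<>(Relation.class, r);
		// the sequence-file key is the payload class name, the value its JSON form
		Text key = new Text(aa.getClazz().getCanonicalName()); // eu.dnetlib.dhp.schema.oaf.Relation
		Text value = new Text(mapper.writeValueAsString(aa));
		System.out.println(key + " -> " + value);
	}
}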
@@ -0,0 +1,25 @@
+
+package eu.dnetlib.dhp.actionmanager.personentity;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
+
+import eu.dnetlib.dhp.collection.orcid.model.Work;
+
+public class WorkList implements Serializable {
+	private ArrayList<Work> workArrayList;
+
+	public ArrayList<Work> getWorkArrayList() {
+		return workArrayList;
+	}
+
+	public void setWorkArrayList(ArrayList<Work> workArrayList) {
+		this.workArrayList = workArrayList;
+	}
+
+	public WorkList() {
+		workArrayList = new ArrayList<>();
+	}
+}
@@ -0,0 +1,91 @@
+
+package eu.dnetlib.dhp.actionmanager.sdgnodoi;
+
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+
+import java.io.IOException;
+import java.io.Serializable;
+import java.util.Objects;
+import java.util.Optional;
+
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.Hdfs;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.GzipCodec;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.SparkSession;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.common.HdfsSupport;
+import eu.dnetlib.dhp.schema.action.AtomicAction;
+import eu.dnetlib.dhp.schema.oaf.Result;
+import scala.Tuple2;
+
+public class CreateActionSetSparkJob implements Serializable {
+
+	private static final Logger log = LoggerFactory.getLogger(CreateActionSetSparkJob.class);
+
+	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+	public static void main(final String[] args) throws IOException, ParseException {
+
+		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+			IOUtils
+				.toString(
+					Objects
+						.requireNonNull(
+							CreateActionSetSparkJob.class
+								.getResourceAsStream(
+									"/eu/dnetlib/dhp/actionmanager/fosnodoi/as_parameters.json"))));
+
+		parser.parseArgument(args);
+
+		Boolean isSparkSessionManaged = Optional
+			.ofNullable(parser.get("isSparkSessionManaged"))
+			.map(Boolean::valueOf)
+			.orElse(Boolean.TRUE);
+
+		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
+
+		final String inputPath = parser.get("sourcePath");
+		log.info("inputPath {}", inputPath);
+
+		final String outputPath = parser.get("outputPath");
+		log.info("outputPath {}", outputPath);
+
+		SparkConf conf = new SparkConf();
+		runWithSparkSession(
+			conf,
+			isSparkSessionManaged,
+			spark -> {
+				HdfsSupport.remove(outputPath, spark.sparkContext().hadoopConfiguration());
+				createActionSet(spark, inputPath, outputPath);
+			});
+
+	}
+
+	private static void createActionSet(SparkSession spark, String inputPath, String outputPath) {
+		spark
+			.read()
+			.textFile(inputPath)
+			.map(
+				(MapFunction<String, Result>) value -> OBJECT_MAPPER.readValue(value, Result.class),
+				Encoders.bean(Result.class))
+			.toJavaRDD()
+			.map(p -> new AtomicAction(p.getClass(), p))
+			.mapToPair(
+				aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
+					new Text(OBJECT_MAPPER.writeValueAsString(aa))))
+			.saveAsHadoopFile(
+				outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, GzipCodec.class);
+	}
+
+}
@@ -5,11 +5,10 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

 import java.io.Serializable;
 import java.util.*;
-import java.util.stream.Collectors;

 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.compress.GzipCodec;
+import org.apache.hadoop.io.compress.BZip2Codec;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.FilterFunction;
@@ -113,7 +112,7 @@ public class CreateActionSetFromWebEntries implements Serializable {
 			.mapToPair(
 				aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
 					new Text(OBJECT_MAPPER.writeValueAsString(aa))))
-			.saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, GzipCodec.class);
+			.saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, BZip2Codec.class);

 	}

@@ -153,11 +152,40 @@
 			.select("OpenAlexId");
 	}

+	private static List<Relation> createAffiliationRelationPairPMCID(String pmcid, String ror) {
+		if (pmcid == null)
+			return new ArrayList<>();
+
+		return createAffiliatioRelationPair(
+			PMCID_PREFIX
+				+ IdentifierFactory
+					.md5(PidCleaner.normalizePidValue(PidType.pmc.toString(), removeResolver("PMC", pmcid))),
+			ror);
+	}
+
+	private static List<Relation> createAffiliationRelationPairPMID(String pmid, String ror) {
+		if (pmid == null)
+			return new ArrayList<>();
+
+		return createAffiliatioRelationPair(
+			PMID_PREFIX
+				+ IdentifierFactory
+					.md5(PidCleaner.normalizePidValue(PidType.pmid.toString(), removeResolver("PMID", pmid))),
+			ror);
+	}
+
 	private static String removeResolver(String pidType, String pid) {
-		if (pidType.equals("DOI")) {
+		switch (pidType) {
+			case "PMID":
+				return pid.substring(33);
+			case "PMC":
+				return "PMC" + pid.substring(43);
+			case "DOI":
 				return pid.substring(16);
 		}
-		throw new IllegalArgumentException("DOI is the only supported PID type");
+
+		throw new RuntimeException();
+
 	}

 	private static List<Relation> createAffiliationRelationPairDOI(String doi, String ror) {
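removeResolver now strips a fixed-length resolver prefix per PID type. The DOI branch is easy to verify: "https://doi.org/" is exactly 16 characters. The PMID (33) and PMC (43) offsets are assumed to match the resolver URLs present in the crawl data, which this diff does not show. Note also that the fall-through now throws a bare RuntimeException, dropping the old diagnostic message ("DOI is the only supported PID type"). A worked example for the DOI branch:

class RemoveResolverDemo {
	public static void main(String[] args) {
		// "https://doi.org/" has length 16, so substring(16) leaves the bare DOI
		String pid = "https://doi.org/10.1234/abcd";
		System.out.println(pid.substring(16)); // prints 10.1234/abcd
	}
}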
@ -0,0 +1,158 @@
|
||||||
|
|
||||||
|
package eu.dnetlib.dhp.actionmanager.webcrawl;
|
||||||
|
|
||||||
|
import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
|
||||||
|
import static org.apache.spark.sql.functions.*;
|
||||||
|
|
||||||
|
import java.io.File;
|
||||||
|
import java.io.Serializable;
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.Optional;
|
||||||
|
|
||||||
|
import org.apache.commons.io.FileUtils;
|
||||||
|
import org.apache.commons.io.IOUtils;
|
||||||
|
import org.apache.commons.io.filefilter.DirectoryFileFilter;
|
||||||
|
import org.apache.commons.io.filefilter.FileFileFilter;
|
||||||
|
import org.apache.hadoop.io.Text;
|
||||||
|
import org.apache.hadoop.io.compress.BZip2Codec;
|
||||||
|
import org.apache.hadoop.io.compress.GzipCodec;
|
||||||
|
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
|
||||||
|
import org.apache.spark.SparkConf;
|
||||||
|
import org.apache.spark.api.java.JavaPairRDD;
|
||||||
|
import org.apache.spark.api.java.JavaRDD;
|
||||||
|
import org.apache.spark.api.java.JavaSparkContext;
|
||||||
|
import org.apache.spark.api.java.function.FilterFunction;
|
||||||
|
import org.apache.spark.api.java.function.MapFunction;
|
||||||
|
import org.apache.spark.sql.*;
|
||||||
|
import org.apache.spark.sql.types.*;
|
||||||
|
import org.slf4j.Logger;
|
||||||
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.schema.action.AtomicAction;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;

import scala.Tuple2;

public class RemoveRelationFromActionSet implements Serializable {

	private static final Logger log = LoggerFactory.getLogger(RemoveRelationFromActionSet.class);

	private static final ObjectMapper MAPPER = new ObjectMapper();

	private static final StructType KV_SCHEMA = StructType$.MODULE$
		.apply(
			Arrays
				.asList(
					StructField$.MODULE$.apply("key", DataTypes.StringType, false, Metadata.empty()),
					StructField$.MODULE$.apply("value", DataTypes.StringType, false, Metadata.empty())));

	private static final StructType ATOMIC_ACTION_SCHEMA = StructType$.MODULE$
		.apply(
			Arrays
				.asList(
					StructField$.MODULE$.apply("clazz", DataTypes.StringType, false, Metadata.empty()),
					StructField$.MODULE$
						.apply(
							"payload", DataTypes.StringType, false, Metadata.empty())));

	public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	public static void main(String[] args) throws Exception {
		String jsonConfiguration = IOUtils
			.toString(
				CreateActionSetFromWebEntries.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/actionmanager/webcrawl/as_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);

		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		// the actionSet path
		final String inputPath = parser.get("sourcePath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath: {}", outputPath);

		final String blackListInputPath = parser.get("blackListPath");
		log.info("blackListInputPath: {}", blackListInputPath);

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				removeFromActionSet(spark, inputPath, outputPath, blackListInputPath);
			});
	}

	private static void removeFromActionSet(SparkSession spark, String inputPath, String outputPath,
		String blackListInputPath) {
		// read the blacklist
		Dataset<String> blackList = readBlackList(spark, blackListInputPath)
			.map(
				(MapFunction<Row, String>) r -> IdentifierFactory
					.idFromPid("50", "doi", ((String) r.getAs("doi")).substring(16), true),
				Encoders.STRING());

		// read the old actionset and get the relations in the payload
		JavaPairRDD<Text, Text> seq = JavaSparkContext
			.fromSparkContext(spark.sparkContext())
			.sequenceFile(inputPath, Text.class, Text.class);

		JavaRDD<Row> rdd = seq
			.map(x -> RowFactory.create(x._1().toString(), x._2().toString()));

		Dataset<Row> actionSet = spark
			.createDataFrame(rdd, KV_SCHEMA)
			.withColumn("atomic_action", from_json(col("value"), ATOMIC_ACTION_SCHEMA))
			.select(expr("atomic_action.*"));

		Dataset<Relation> relation = actionSet
			.map(
				(MapFunction<Row, Relation>) r -> MAPPER.readValue((String) r.getAs("payload"), Relation.class),
				Encoders.bean(Relation.class));

		// keep only the relations whose source does not match any pid in the blacklist
		Dataset<Relation> relNoSource = relation
			.joinWith(blackList, relation.col("source").equalTo(blackList.col("value")), "left")
			.filter((FilterFunction<Tuple2<Relation, String>>) t2 -> t2._2() == null)
			.map((MapFunction<Tuple2<Relation, String>, Relation>) t2 -> t2._1(), Encoders.bean(Relation.class));

		// keep only the relations whose target does not match any pid in the blacklist
		relNoSource
			.joinWith(blackList, relNoSource.col("target").equalTo(blackList.col("value")), "left")
			.filter((FilterFunction<Tuple2<Relation, String>>) t2 -> t2._2() == null)
			.map((MapFunction<Tuple2<Relation, String>, Relation>) t2 -> t2._1(), Encoders.bean(Relation.class))
			.toJavaRDD()
			.map(p -> new AtomicAction(p.getClass(), p))
			.mapToPair(
				aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
					new Text(OBJECT_MAPPER.writeValueAsString(aa))))
			.saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, BZip2Codec.class);
	}

	private static Dataset<Row> readBlackList(SparkSession spark, String inputPath) {
		return spark
			.read()
			.json(inputPath)
			.select("doi");
	}

}
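Sketch (not part of the patch): the two joinWith/filter passes above implement an anti-join by hand. Spark's built-in "left_anti" join type expresses the same blacklist filtering more directly; the helper below is an illustration only, and the toDF()/Encoders.bean round-trip is an assumption about the Relation bean.

	private static Dataset<Relation> removeBlacklisted(final Dataset<Relation> rels, final Dataset<String> blackList) {
		// Dataset<String> columns are named "value" by default, matching the joins above
		final Dataset<Row> bl = blackList.toDF();
		final Dataset<Row> rel = rels.toDF();
		// drop relations whose source appears in the blacklist
		final Dataset<Row> noSource = rel
			.join(bl, rel.col("source").equalTo(bl.col("value")), "left_anti");
		// then drop those whose target appears in the blacklist
		return noSource
			.join(bl, noSource.col("target").equalTo(bl.col("value")), "left_anti")
			.as(Encoders.bean(Relation.class));
	}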
@@ -22,9 +22,11 @@ import eu.dnetlib.dhp.collection.plugin.CollectorPlugin;
 import eu.dnetlib.dhp.collection.plugin.base.BaseCollectorPlugin;
 import eu.dnetlib.dhp.collection.plugin.file.FileCollectorPlugin;
 import eu.dnetlib.dhp.collection.plugin.file.FileGZipCollectorPlugin;
+import eu.dnetlib.dhp.collection.plugin.gtr2.Gtr2PublicationsCollectorPlugin;
 import eu.dnetlib.dhp.collection.plugin.mongodb.MDStoreCollectorPlugin;
 import eu.dnetlib.dhp.collection.plugin.mongodb.MongoDbDumpCollectorPlugin;
 import eu.dnetlib.dhp.collection.plugin.oai.OaiCollectorPlugin;
+import eu.dnetlib.dhp.collection.plugin.osf.OsfPreprintsCollectorPlugin;
 import eu.dnetlib.dhp.collection.plugin.rest.RestCollectorPlugin;
 import eu.dnetlib.dhp.common.aggregation.AggregatorReport;
 import eu.dnetlib.dhp.common.collection.CollectorException;
@@ -58,7 +60,7 @@ public class CollectorWorker extends ReportingJob {

 	public void collect() throws UnknownCollectorPluginException, CollectorException, IOException {

-		final String outputPath = mdStoreVersion.getHdfsPath() + SEQUENCE_FILE_NAME;
+		final String outputPath = this.mdStoreVersion.getHdfsPath() + SEQUENCE_FILE_NAME;
 		log.info("outputPath path is {}", outputPath);

 		final CollectorPlugin plugin = getCollectorPlugin();
@@ -68,36 +70,36 @@ public class CollectorWorker extends ReportingJob {

 		try (SequenceFile.Writer writer = SequenceFile
 			.createWriter(
-				fileSystem.getConf(),
-				SequenceFile.Writer.file(new Path(outputPath)),
-				SequenceFile.Writer.keyClass(IntWritable.class),
-				SequenceFile.Writer.valueClass(Text.class),
+				this.fileSystem.getConf(), SequenceFile.Writer.file(new Path(outputPath)), SequenceFile.Writer
+					.keyClass(IntWritable.class),
+				SequenceFile.Writer
+					.valueClass(Text.class),
 				SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK, new DeflateCodec()))) {
 			final IntWritable key = new IntWritable(counter.get());
 			final Text value = new Text();
 			plugin
-				.collect(api, report)
-				.forEach(
-					content -> {
-						key.set(counter.getAndIncrement());
-						value.set(content);
-						try {
-							writer.append(key, value);
-						} catch (Throwable e) {
-							throw new RuntimeException(e);
-						}
-					});
-		} catch (Throwable e) {
-			report.put(e.getClass().getName(), e.getMessage());
+				.collect(this.api, this.report)
+				.forEach(content -> {
+					key.set(counter.getAndIncrement());
+					value.set(content);
+					try {
+						writer.append(key, value);
+					} catch (final Throwable e) {
+						throw new RuntimeException(e);
+					}
+				});
+		} catch (final Throwable e) {
+			this.report.put(e.getClass().getName(), e.getMessage());
 			throw new CollectorException(e);
 		} finally {
 			shutdown();
-			report.ongoing(counter.longValue(), counter.longValue());
+			this.report.ongoing(counter.longValue(), counter.longValue());
 		}
 	}

-	private void scheduleReport(AtomicInteger counter) {
+	private void scheduleReport(final AtomicInteger counter) {
 		schedule(new ReporterCallback() {

 			@Override
 			public Long getCurrent() {
 				return counter.longValue();
@@ -112,33 +114,37 @@ public class CollectorWorker extends ReportingJob {

 	private CollectorPlugin getCollectorPlugin() throws UnknownCollectorPluginException {

-		switch (CollectorPlugin.NAME.valueOf(api.getProtocol())) {
+		switch (CollectorPlugin.NAME.valueOf(this.api.getProtocol())) {
 			case oai:
-				return new OaiCollectorPlugin(clientParams);
+				return new OaiCollectorPlugin(this.clientParams);
 			case rest_json2xml:
-				return new RestCollectorPlugin(clientParams);
+				return new RestCollectorPlugin(this.clientParams);
 			case file:
-				return new FileCollectorPlugin(fileSystem);
+				return new FileCollectorPlugin(this.fileSystem);
 			case fileGzip:
-				return new FileGZipCollectorPlugin(fileSystem);
+				return new FileGZipCollectorPlugin(this.fileSystem);
 			case baseDump:
 				return new BaseCollectorPlugin(this.fileSystem);
+			case gtr2Publications:
+				return new Gtr2PublicationsCollectorPlugin(this.clientParams);
+			case osfPreprints:
+				return new OsfPreprintsCollectorPlugin(this.clientParams);
 			case other:
 				final CollectorPlugin.NAME.OTHER_NAME plugin = Optional
-					.ofNullable(api.getParams().get("other_plugin_type"))
+					.ofNullable(this.api.getParams().get("other_plugin_type"))
 					.map(CollectorPlugin.NAME.OTHER_NAME::valueOf)
 					.orElseThrow(() -> new IllegalArgumentException("invalid other_plugin_type"));

 				switch (plugin) {
 					case mdstore_mongodb_dump:
-						return new MongoDbDumpCollectorPlugin(fileSystem);
+						return new MongoDbDumpCollectorPlugin(this.fileSystem);
 					case mdstore_mongodb:
 						return new MDStoreCollectorPlugin();
 					default:
 						throw new UnknownCollectorPluginException("plugin is not managed: " + plugin);
 				}
 			default:
-				throw new UnknownCollectorPluginException("protocol is not managed: " + api.getProtocol());
+				throw new UnknownCollectorPluginException("protocol is not managed: " + this.api.getProtocol());
 		}
 	}
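For reference, a hypothetical resolution path for the new protocols (the ApiDescriptor setup is illustrative, not taken from the patch):

	final ApiDescriptor api = new ApiDescriptor();
	api.setProtocol("gtr2Publications"); // must equal CollectorPlugin.NAME.gtr2Publications.name()
	// getCollectorPlugin() now routes this descriptor to Gtr2PublicationsCollectorPlugin;
	// "osfPreprints" is resolved to OsfPreprintsCollectorPlugin the same way.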
@@ -20,6 +20,9 @@ public class Author extends ORCIDItem {

 	private String lastModifiedDate;

+	public Author() {
+	}
+
 	public String getBiography() {
 		return biography;
 	}
@@ -11,4 +11,7 @@ public class ORCIDItem {
 	public void setOrcid(String orcid) {
 		this.orcid = orcid;
 	}

+	public ORCIDItem() {
+	}
 }
@@ -32,4 +32,6 @@ public class Work extends ORCIDItem {
 		pids.add(pid);
 	}

+	public Work() {
+	}
 }
@@ -11,7 +11,7 @@ public interface CollectorPlugin {

 	enum NAME {

-		oai, other, rest_json2xml, file, fileGzip, baseDump;
+		oai, other, rest_json2xml, file, fileGzip, baseDump, gtr2Publications, osfPreprints;

 		public enum OTHER_NAME {
 			mdstore_mongodb_dump, mdstore_mongodb
@@ -0,0 +1,43 @@

package eu.dnetlib.dhp.collection.plugin.gtr2;

import java.util.Iterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import eu.dnetlib.dhp.collection.ApiDescriptor;
import eu.dnetlib.dhp.collection.plugin.CollectorPlugin;
import eu.dnetlib.dhp.common.aggregation.AggregatorReport;
import eu.dnetlib.dhp.common.collection.CollectorException;
import eu.dnetlib.dhp.common.collection.HttpClientParams;

public class Gtr2PublicationsCollectorPlugin implements CollectorPlugin {

	private final HttpClientParams clientParams;

	public Gtr2PublicationsCollectorPlugin(final HttpClientParams clientParams) {
		this.clientParams = clientParams;
	}

	@Override
	public Stream<String> collect(final ApiDescriptor api, final AggregatorReport report) throws CollectorException {

		final String baseUrl = api.getBaseUrl();
		final String startPage = api.getParams().get("startPage");
		final String endPage = api.getParams().get("endPage");
		final String fromDate = api.getParams().get("fromDate");

		if ((fromDate != null) && !fromDate.matches("\\d{4}-\\d{2}-\\d{2}")) {
			throw new CollectorException("Invalid date (YYYY-MM-DD): " + fromDate);
		}

		final Iterator<String> iterator = new Gtr2PublicationsIterator(baseUrl, fromDate, startPage, endPage,
			this.clientParams);
		final Spliterator<String> spliterator = Spliterators.spliteratorUnknownSize(iterator, Spliterator.ORDERED);

		return StreamSupport.stream(spliterator, false);
	}

}
@@ -0,0 +1,215 @@

package eu.dnetlib.dhp.collection.plugin.gtr2;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.function.Function;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.dom4j.Document;
import org.dom4j.DocumentException;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.common.collection.CollectorException;
import eu.dnetlib.dhp.common.collection.HttpClientParams;
import eu.dnetlib.dhp.common.collection.HttpConnector2;

public class Gtr2PublicationsIterator implements Iterator<String> {

	public static final int PAGE_SIZE = 20;

	private static final Logger log = LoggerFactory.getLogger(Gtr2PublicationsIterator.class);

	private final HttpConnector2 connector;
	private static final DateTimeFormatter simpleDateTimeFormatter = DateTimeFormat.forPattern("yyyy-MM-dd");

	private static final int MAX_ATTEMPTS = 10;

	private final String baseUrl;
	private int currPage;
	private int endPage;
	private boolean incremental = false;
	private DateTime fromDate;

	private final Map<String, String> cache = new HashMap<>();

	private final Queue<String> queue = new LinkedList<>();

	private String nextElement;

	public Gtr2PublicationsIterator(final String baseUrl, final String fromDate, final String startPage,
		final String endPage,
		final HttpClientParams clientParams)
		throws CollectorException {

		this.baseUrl = baseUrl;
		this.currPage = NumberUtils.toInt(startPage, 1);
		this.endPage = NumberUtils.toInt(endPage, Integer.MAX_VALUE);
		this.incremental = StringUtils.isNotBlank(fromDate);
		this.connector = new HttpConnector2(clientParams);

		if (this.incremental) {
			this.fromDate = parseDate(fromDate);
		}

		prepareNextElement();
	}

	@Override
	public boolean hasNext() {
		return this.nextElement != null;
	}

	@Override
	public String next() {
		try {
			return this.nextElement;
		} finally {
			prepareNextElement();
		}
	}

	@Override
	public void remove() {
		throw new UnsupportedOperationException();
	}

	private void prepareNextElement() {
		while ((this.currPage <= this.endPage) && this.queue.isEmpty()) {
			log.debug("FETCHING PAGE " + this.currPage + "/" + this.endPage);
			this.queue.addAll(fetchPage(this.currPage++));
		}
		this.nextElement = this.queue.poll();
	}

	private List<String> fetchPage(final int pageNumber) {

		final List<String> res = new ArrayList<>();
		try {
			final Document doc = loadURL(cleanURL(this.baseUrl + "/outcomes/publications?p=" + pageNumber), 0);

			if (this.endPage == Integer.MAX_VALUE) {
				this.endPage = NumberUtils.toInt(doc.valueOf("/*/@*[local-name() = 'totalPages']"));
			}

			for (final Object po : doc.selectNodes("//*[local-name() = 'publication']")) {
				final Element mainEntity = (Element) ((Element) po).detach();

				if (filterIncremental(mainEntity)) {
					res.add(expandMainEntity(mainEntity));
				} else {
					log.debug("Skipped entity");
				}

			}
		} catch (final Throwable e) {
			log.error("Exception fetching page " + pageNumber, e);
			throw new RuntimeException("Exception fetching page " + pageNumber, e);
		}

		return res;
	}

	private void addLinkedEntities(final Element master, final String relType, final Element newRoot,
		final Function<Document, Element> mapper) {

		for (final Object o : master.selectNodes(".//*[local-name()='link']")) {
			final String rel = ((Element) o).valueOf("@*[local-name()='rel']");
			final String href = ((Element) o).valueOf("@*[local-name()='href']");

			if (relType.equals(rel) && StringUtils.isNotBlank(href)) {
				final String cacheKey = relType + "#" + href;
				if (this.cache.containsKey(cacheKey)) {
					try {
						log.debug(" * from cache (" + relType + "): " + href);
						newRoot.add(DocumentHelper.parseText(this.cache.get(cacheKey)).getRootElement());
					} catch (final DocumentException e) {
						log.error("Error retrieving cache element: " + cacheKey, e);
						throw new RuntimeException("Error retrieving cache element: " + cacheKey, e);
					}
				} else {
					final Document doc = loadURL(cleanURL(href), 0);
					final Element elem = mapper.apply(doc);
					newRoot.add(elem);
					this.cache.put(cacheKey, elem.asXML());
				}

			}
		}
	}

	private boolean filterIncremental(final Element e) {
		if (!this.incremental || isAfter(e.valueOf("@*[local-name() = 'created']"), this.fromDate)
			|| isAfter(e.valueOf("@*[local-name() = 'updated']"), this.fromDate)) {
			return true;
		}
		return false;
	}

	private String expandMainEntity(final Element mainEntity) {
		final Element newRoot = DocumentHelper.createElement("doc");
		newRoot.add(mainEntity);
		addLinkedEntities(mainEntity, "PROJECT", newRoot, this::asProjectElement);
		return DocumentHelper.createDocument(newRoot).asXML();
	}

	private Element asProjectElement(final Document doc) {
		final Element newOrg = DocumentHelper.createElement("project");
		newOrg.addElement("id").setText(doc.valueOf("/*/@*[local-name()='id']"));
		newOrg
			.addElement("code")
			.setText(doc.valueOf("//*[local-name()='identifier' and @*[local-name()='type'] = 'RCUK']"));
		newOrg.addElement("title").setText(doc.valueOf("//*[local-name()='title']"));
		return newOrg;
	}

	private static String cleanURL(final String url) {
		String cleaned = url;
		if (cleaned.contains("gtr.gtr")) {
			cleaned = cleaned.replace("gtr.gtr", "gtr");
		}
		if (cleaned.startsWith("http://")) {
			cleaned = cleaned.replaceFirst("http://", "https://");
		}
		return cleaned;
	}

	private Document loadURL(final String cleanUrl, final int attempt) {
		try {
			log.debug(" * Downloading Url: " + cleanUrl);
			final byte[] bytes = this.connector.getInputSource(cleanUrl).getBytes("UTF-8");
			return DocumentHelper.parseText(new String(bytes));
		} catch (final Throwable e) {
			log.error("Error downloading url: " + cleanUrl + ", attempt = " + attempt, e);
			if (attempt >= MAX_ATTEMPTS) {
				throw new RuntimeException("Error downloading url: " + cleanUrl, e);
			}
			try {
				Thread.sleep(60000); // wait a minute before retrying
			} catch (final InterruptedException e1) {
				throw new RuntimeException("Error downloading url: " + cleanUrl, e);
			}
			return loadURL(cleanUrl, attempt + 1);
		}
	}

	private DateTime parseDate(final String s) {
		return DateTime.parse(s.contains("T") ? s.substring(0, s.indexOf("T")) : s, simpleDateTimeFormatter);
	}

	private boolean isAfter(final String d, final DateTime fromDate) {
		return StringUtils.isNotBlank(d) && parseDate(d).isAfter(fromDate);
	}
}
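A minimal wiring sketch for the iterator above; the base URL, date, and page range are assumptions for illustration, the real values come from the ApiDescriptor parameters:

	// harvest pages 1..3 of GtR publications created/updated since 2024-01-01
	final Iterator<String> records = new Gtr2PublicationsIterator(
		"https://gtr.ukri.org/gtr/api", "2024-01-01", "1", "3", new HttpClientParams());
	while (records.hasNext()) {
		System.out.println(records.next()); // one expanded <doc> per publication
	}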
@@ -0,0 +1,52 @@

package eu.dnetlib.dhp.collection.plugin.osf;

import java.util.Optional;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.math.NumberUtils;

import eu.dnetlib.dhp.collection.ApiDescriptor;
import eu.dnetlib.dhp.collection.plugin.CollectorPlugin;
import eu.dnetlib.dhp.common.aggregation.AggregatorReport;
import eu.dnetlib.dhp.common.collection.CollectorException;
import eu.dnetlib.dhp.common.collection.HttpClientParams;

public class OsfPreprintsCollectorPlugin implements CollectorPlugin {

	public static final int PAGE_SIZE_VALUE_DEFAULT = 100;

	private final HttpClientParams clientParams;

	public OsfPreprintsCollectorPlugin(final HttpClientParams clientParams) {
		this.clientParams = clientParams;
	}

	@Override
	public Stream<String> collect(final ApiDescriptor api, final AggregatorReport report) throws CollectorException {
		final String baseUrl = api.getBaseUrl();

		final int pageSize = Optional
			.ofNullable(api.getParams().get("pageSize"))
			.filter(StringUtils::isNotBlank)
			.map(s -> NumberUtils.toInt(s, PAGE_SIZE_VALUE_DEFAULT))
			.orElse(PAGE_SIZE_VALUE_DEFAULT);

		if (StringUtils.isBlank(baseUrl)) {
			throw new CollectorException("Param 'baseUrl' is null or empty");
		}

		final OsfPreprintsIterator it = new OsfPreprintsIterator(baseUrl, pageSize, getClientParams());

		return StreamSupport
			.stream(Spliterators.spliteratorUnknownSize(it, Spliterator.ORDERED), false);
	}

	public HttpClientParams getClientParams() {
		return this.clientParams;
	}
}
@@ -0,0 +1,151 @@

package eu.dnetlib.dhp.collection.plugin.osf;

import java.util.Iterator;
import java.util.Queue;
import java.util.concurrent.PriorityBlockingQueue;

import org.apache.commons.lang3.StringUtils;
import org.dom4j.Document;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.dom4j.Node;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.collection.plugin.utils.JsonUtils;
import eu.dnetlib.dhp.common.collection.CollectorException;
import eu.dnetlib.dhp.common.collection.HttpClientParams;
import eu.dnetlib.dhp.common.collection.HttpConnector2;

public class OsfPreprintsIterator implements Iterator<String> {

	private static final Logger log = LoggerFactory.getLogger(OsfPreprintsIterator.class);

	private static final int MAX_ATTEMPTS = 5;

	private final HttpClientParams clientParams;

	private final String baseUrl;
	private final int pageSize;

	private String currentUrl;

	private final Queue<String> recordQueue = new PriorityBlockingQueue<>();

	public OsfPreprintsIterator(
		final String baseUrl,
		final int pageSize,
		final HttpClientParams clientParams) {

		this.clientParams = clientParams;
		this.baseUrl = baseUrl;
		this.pageSize = pageSize;

		initQueue();
	}

	private void initQueue() {
		this.currentUrl = this.baseUrl + "?filter:is_published:d=true&format=json&page[size]=" + this.pageSize;

		log.info("REST calls starting with {}", this.currentUrl);
	}

	@Override
	public boolean hasNext() {
		synchronized (this.recordQueue) {
			while (this.recordQueue.isEmpty() && StringUtils.isNotBlank(this.currentUrl)
				&& this.currentUrl.startsWith("http")) {
				try {
					this.currentUrl = downloadPage(this.currentUrl);
				} catch (final CollectorException e) {
					log.debug("CollectorPlugin.next()-Exception: {}", e);
					throw new RuntimeException(e);
				}
			}

			if (!this.recordQueue.isEmpty()) {
				return true;
			}

			return false;
		}
	}

	@Override
	public String next() {
		synchronized (this.recordQueue) {
			return this.recordQueue.poll();
		}
	}

	private String downloadPage(final String url) throws CollectorException {

		final Document doc = downloadUrl(url, 0);

		for (final Object o : doc.selectNodes("/*/data")) {

			final Element n = (Element) ((Element) o).detach();

			final Element group = DocumentHelper.createElement("group");
			group.addAttribute("id", n.valueOf("./id"));

			group.addElement("preprint").add(n);

			for (final Object o1 : n.selectNodes(".//contributors//href")) {
				final String href = ((Node) o1).getText();
				if (StringUtils.isNotBlank(href) && href.startsWith("http")) {
					final Document doc1 = downloadUrl(href, 0);
					group.addElement("contributors").add(doc1.getRootElement().detach());
				}
			}
			for (final Object o1 : n.selectNodes(".//primary_file//href")) {
				final String href = ((Node) o1).getText();
				if (StringUtils.isNotBlank(href) && href.startsWith("http")) {
					final Document doc1 = downloadUrl(href, 0);
					group.addElement("primary_file").add(doc1.getRootElement().detach());
				}
			}

			this.recordQueue.add(DocumentHelper.createDocument(group).asXML());
		}

		return doc.valueOf("/*/links/next");

	}

	private Document downloadUrl(final String url, final int attempt) throws CollectorException {
		if (attempt > MAX_ATTEMPTS) {
			throw new CollectorException("Max Number of attempts reached, url:" + url);
		}

		if (attempt > 0) {
			final int delay = (attempt * 5000);
			log.debug("Attempt {} with delay {}", attempt, delay);
			try {
				Thread.sleep(delay);
			} catch (final InterruptedException e) {
				throw new CollectorException(e);
			}
		}

		try {
			log.info("requesting URL [{}]", url);

			final HttpConnector2 connector = new HttpConnector2(this.clientParams);

			final String json = connector.getInputSource(url);
			final String xml = JsonUtils.convertToXML(json);

			return DocumentHelper.parseText(xml);

		} catch (final Throwable e) {
			log.warn(e.getMessage(), e);
			if ((e instanceof CollectorException) && e.getMessage().contains("401")) {
				final Element root = DocumentHelper.createElement("error_401_authorization_required");
				return DocumentHelper.createDocument(root);
			}
			return downloadUrl(url, attempt + 1);
		}
	}
}
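A hypothetical end-to-end usage of the two OSF classes through the plugin interface; the endpoint is an assumption, not taken from the patch:

	final ApiDescriptor api = new ApiDescriptor();
	api.setBaseUrl("https://api.osf.io/v2/preprints"); // assumed OSF endpoint
	final Stream<String> preprints = new OsfPreprintsCollectorPlugin(new HttpClientParams())
		.collect(api, new AggregatorReport()); // may throw CollectorException
	preprints.limit(10).forEach(System.out::println); // one <group> record per preprint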
@@ -0,0 +1,76 @@

package eu.dnetlib.dhp.collection.plugin.researchfi;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicNameValuePair;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.collection.ApiDescriptor;
import eu.dnetlib.dhp.collection.plugin.CollectorPlugin;
import eu.dnetlib.dhp.common.aggregation.AggregatorReport;
import eu.dnetlib.dhp.common.collection.CollectorException;

public class ResearchFiCollectorPlugin implements CollectorPlugin {

	private static final Logger log = LoggerFactory.getLogger(ResearchFiCollectorPlugin.class);

	@Override
	public Stream<String> collect(final ApiDescriptor api, final AggregatorReport report)
		throws CollectorException {

		final String authUrl = api.getParams().get("auth_url");
		final String clientId = api.getParams().get("auth_client_id");
		final String clientSecret = api.getParams().get("auth_client_secret");

		final String authToken = authenticate(authUrl, clientId, clientSecret);

		final Iterator<String> iter = new ResearchFiIterator(api.getBaseUrl(), authToken);

		return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iter, Spliterator.ORDERED), false);
	}

	private String authenticate(final String authUrl, final String clientId, final String clientSecret)
		throws CollectorException {
		try (final CloseableHttpClient client = HttpClients.createDefault()) {
			final HttpPost req = new HttpPost(authUrl);
			final List<NameValuePair> params = new ArrayList<>();
			params.add(new BasicNameValuePair("grant_type", "client_credentials"));
			params.add(new BasicNameValuePair("client_id", clientId));
			params.add(new BasicNameValuePair("client_secret", clientSecret));

			req.setEntity(new UrlEncodedFormEntity(params, "UTF-8"));

			try (final CloseableHttpResponse response = client.execute(req)) {
				final String content = IOUtils.toString(response.getEntity().getContent());
				final JSONObject obj = new JSONObject(content);
				final String token = obj.getString("access_token");
				if (StringUtils.isNotBlank(token)) {
					return token;
				}
			}
		} catch (final Throwable e) {
			log.warn("Error obtaining access token", e);
			throw new CollectorException("Error obtaining access token", e);
		}
		throw new CollectorException("Access token is missing");
	}

}
@@ -0,0 +1,117 @@

package eu.dnetlib.dhp.collection.plugin.researchfi;

import java.util.Iterator;
import java.util.Queue;
import java.util.concurrent.PriorityBlockingQueue;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.Header;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.json.JSONArray;

import eu.dnetlib.dhp.collection.plugin.utils.JsonUtils;
import eu.dnetlib.dhp.common.collection.CollectorException;

public class ResearchFiIterator implements Iterator<String> {

	private static final Log log = LogFactory.getLog(ResearchFiIterator.class);

	private static final int PAGE_SIZE = 100;

	private final String baseUrl;
	private final String authToken;
	private int currPage;
	private int nPages;

	private final Queue<String> queue = new PriorityBlockingQueue<>();

	public ResearchFiIterator(final String baseUrl, final String authToken) {
		this.baseUrl = baseUrl;
		this.authToken = authToken;
		this.currPage = 0;
		this.nPages = 0;
	}

	private void verifyStarted() {
		if (this.currPage == 0) {
			try {
				nextCall();
			} catch (final CollectorException e) {
				throw new IllegalStateException(e);
			}
		}
	}

	@Override
	public boolean hasNext() {
		synchronized (this.queue) {
			verifyStarted();
			return !this.queue.isEmpty();
		}
	}

	@Override
	public String next() {
		synchronized (this.queue) {
			verifyStarted();
			final String res = this.queue.poll();
			while (this.queue.isEmpty() && (this.currPage < this.nPages)) {
				try {
					nextCall();
				} catch (final CollectorException e) {
					throw new IllegalStateException(e);
				}
			}
			return res;
		}
	}

	private void nextCall() throws CollectorException {

		this.currPage += 1;

		final String url;
		if (!this.baseUrl.contains("?")) {
			url = String.format("%s?PageNumber=%d&PageSize=%d", this.baseUrl, this.currPage, PAGE_SIZE);
		} else if (!this.baseUrl.contains("PageSize=")) {
			url = String.format("%s&PageNumber=%d&PageSize=%d", this.baseUrl, this.currPage, PAGE_SIZE);
		} else {
			url = String.format("%s&PageNumber=%d", this.baseUrl, this.currPage);
		}
		log.info("Calling url: " + url);

		try (final CloseableHttpClient client = HttpClients.createDefault()) {

			final HttpGet req = new HttpGet(url);
			req.addHeader("Authorization", "Bearer " + this.authToken);
			try (final CloseableHttpResponse response = client.execute(req)) {
				for (final Header header : response.getAllHeaders()) {
					log.debug("HEADER: " + header.getName() + " = " + header.getValue());
					if ("x-page-count".equals(header.getName())) {
						final int totalPages = NumberUtils.toInt(header.getValue());
						if (this.nPages != totalPages) {
							this.nPages = NumberUtils.toInt(header.getValue());
							log.info("Total pages: " + totalPages);
						}
					}
				}

				final String content = IOUtils.toString(response.getEntity().getContent());
				final JSONArray jsonArray = new JSONArray(content);

				jsonArray.forEach(obj -> this.queue.add(JsonUtils.convertToXML(obj.toString())));
			}
		} catch (final Throwable e) {
			log.warn("Error calling url: " + url, e);
			throw new CollectorException("Error calling url: " + url, e);
		}
	}

}
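Self-contained sketch of the URL-building rule in nextCall() above: PageNumber/PageSize are appended depending on what the base URL already carries (the example URL is an assumption):

	static String pageUrl(final String baseUrl, final int page, final int pageSize) {
		if (!baseUrl.contains("?")) {
			return String.format("%s?PageNumber=%d&PageSize=%d", baseUrl, page, pageSize);
		} else if (!baseUrl.contains("PageSize=")) {
			return String.format("%s&PageNumber=%d&PageSize=%d", baseUrl, page, pageSize);
		}
		return String.format("%s&PageNumber=%d", baseUrl, page);
	}
	// pageUrl("https://research.fi/api/rest/v1/publications", 2, 100)
	//   -> "https://research.fi/api/rest/v1/publications?PageNumber=2&PageSize=100"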
@@ -8,7 +8,10 @@ import java.io.StringWriter;
 import java.nio.charset.Charset;
 import java.nio.charset.CharsetDecoder;
 import java.nio.charset.CodingErrorAction;
+import java.util.Arrays;
 import java.util.Iterator;
+import java.util.List;
+import java.util.stream.Collectors;

 import javax.xml.stream.XMLEventFactory;
 import javax.xml.stream.XMLEventReader;

@@ -19,6 +22,7 @@ import javax.xml.stream.XMLStreamException;
 import javax.xml.stream.events.StartElement;
 import javax.xml.stream.events.XMLEvent;

+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -58,13 +62,23 @@ public class XMLIterator implements Iterator<String> {

 	private String element;

+	private List<String> elements;
+
 	private InputStream inputStream;

 	public XMLIterator(final String element, final InputStream inputStream) {
 		super();
 		this.element = element;
+		if (element.contains(",")) {
+			elements = Arrays
+				.stream(element.split(","))
+				.filter(StringUtils::isNoneBlank)
+				.map(String::toLowerCase)
+				.collect(Collectors.toList());
+		}
 		this.inputStream = inputStream;
 		this.parser = getParser();

 		try {
 			this.current = findElement(parser);
 		} catch (XMLStreamException e) {
@@ -113,7 +127,7 @@ public class XMLIterator implements Iterator<String> {
 			final XMLEvent event = parser.nextEvent();

 			// TODO: replace with depth tracking instead of close tag tracking.
-			if (event.isEndElement() && event.asEndElement().getName().getLocalPart().equals(element)) {
+			if (event.isEndElement() && isCheckTag(event.asEndElement().getName().getLocalPart())) {
 				writer.add(event);
 				break;
 			}
@@ -142,31 +156,48 @@ public class XMLIterator implements Iterator<String> {
 		XMLEvent peek = parser.peek();
 		if (peek != null && peek.isStartElement()) {
 			String name = peek.asStartElement().getName().getLocalPart();
-			if (element.equals(name)) {
+			if (isCheckTag(name))
 				return peek;
-			}
 		}

 		while (parser.hasNext()) {
-			final XMLEvent event = parser.nextEvent();
+			XMLEvent event = parser.nextEvent();
 			if (event != null && event.isStartElement()) {
 				String name = event.asStartElement().getName().getLocalPart();
-				if (element.equals(name)) {
+				if (isCheckTag(name))
 					return event;
-				}
 			}
 		}
 		return null;
 	}

 	private XMLEventReader getParser() {
 		try {
-			return inputFactory.get().createXMLEventReader(sanitize(inputStream));
+			XMLInputFactory xif = inputFactory.get();
+			xif.setProperty(XMLInputFactory.SUPPORT_DTD, false);
+			return xif.createXMLEventReader(sanitize(inputStream));
 		} catch (XMLStreamException e) {
 			throw new RuntimeException(e);
 		}
 	}

+	private boolean isCheckTag(final String tagName) {
+		if (elements != null) {
+			final String found = elements
+				.stream()
+				.filter(e -> e.equalsIgnoreCase(tagName))
+				.findFirst()
+				.orElse(null);
+			if (found != null)
+				return true;
+		} else {
+			if (element.equalsIgnoreCase(tagName)) {
+				return true;
+			}
+		}
+		return false;
+	}
+
 	private Reader sanitize(final InputStream in) {
 		final CharsetDecoder charsetDecoder = Charset.forName(UTF_8).newDecoder();
 		charsetDecoder.onMalformedInput(CodingErrorAction.REPLACE);
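Hypothetical usage of the new comma-separated element support in XMLIterator: one iterator now matches several tag names, case-insensitively (the input document is invented for the example):

	final byte[] xml = "<root><record>a</record><metadata>b</metadata></root>"
		.getBytes(StandardCharsets.UTF_8);
	final XMLIterator it = new XMLIterator("record,metadata", new ByteArrayInputStream(xml));
	while (it.hasNext()) {
		System.out.println(it.next()); // emits the <record> fragment, then <metadata>
	}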
@@ -28,7 +28,13 @@
 		"paramLongName": "dataciteInputPath",
 		"paramDescription": "the path to get the input data from Datacite",
 		"paramRequired": true
-	},
+	},{
+		"paramName": "wip",
+		"paramLongName": "webCrawlInputPath",
+		"paramDescription": "the path to get the input data from Web Crawl",
+		"paramRequired": true
+	}
+	,
 	{
 		"paramName": "wip",
 		"paramLongName": "webCrawlInputPath",
@@ -16,7 +16,8 @@
 		"paramLongName": "isSparkSessionManged",
 		"paramDescription": "the hdfs name node",
 		"paramRequired": false
-	},{
+	},
+	{
 		"paramName": "nn",
 		"paramLongName": "nameNode",
 		"paramDescription": "the hdfs name node",
@@ -0,0 +1,25 @@
[
	{
		"paramName": "ip",
		"paramLongName": "inputPath",
		"paramDescription": "the zipped opencitations file",
		"paramRequired": true
	},
	{
		"paramName": "op",
		"paramLongName": "outputPath",
		"paramDescription": "the working path",
		"paramRequired": true
	},
	{
		"paramName": "issm",
		"paramLongName": "isSparkSessionManaged",
		"paramDescription": "the hdfs name node",
		"paramRequired": false
	}, {
		"paramName": "wd",
		"paramLongName": "workingDir",
		"paramDescription": "the hdfs name node",
		"paramRequired": false
	}
]
@@ -0,0 +1,2 @@
inputPath=/data/orcid_2023/tables/
outputPath=/user/miriam.baglioni/peopleAS
@@ -0,0 +1,30 @@
<configuration>
	<property>
		<name>jobTracker</name>
		<value>yarnRM</value>
	</property>
	<property>
		<name>nameNode</name>
		<value>hdfs://nameservice1</value>
	</property>
	<property>
		<name>oozie.use.system.libpath</name>
		<value>true</value>
	</property>
	<property>
		<name>hiveMetastoreUris</name>
		<value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
	</property>
	<property>
		<name>hiveJdbcUrl</name>
		<value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
	</property>
	<property>
		<name>hiveDbName</name>
		<value>openaire</value>
	</property>
	<property>
		<name>oozie.launcher.mapreduce.user.classpath.first</name>
		<value>true</value>
	</property>
</configuration>
@@ -0,0 +1,111 @@
<workflow-app name="PersonEntity" xmlns="uri:oozie:workflow:0.5">
	<parameters>

		<property>
			<name>inputPath</name>
			<description>inputPath</description>
		</property>
		<property>
			<name>outputPath</name>
			<description>the path where to store the actionset</description>
		</property>
		<property>
			<name>sparkDriverMemory</name>
			<description>memory for driver process</description>
		</property>
		<property>
			<name>sparkExecutorMemory</name>
			<description>memory for individual executor</description>
		</property>
		<property>
			<name>sparkExecutorCores</name>
			<description>number of cores used by single executor</description>
		</property>
		<property>
			<name>oozieActionShareLibForSpark2</name>
			<description>oozie action sharelib for spark 2.*</description>
		</property>
		<property>
			<name>spark2ExtraListeners</name>
			<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
			<description>spark 2.* extra listeners classname</description>
		</property>
		<property>
			<name>spark2SqlQueryExecutionListeners</name>
			<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
			<description>spark 2.* sql query execution listeners classname</description>
		</property>
		<property>
			<name>spark2YarnHistoryServerAddress</name>
			<description>spark 2.* yarn history server address</description>
		</property>
		<property>
			<name>spark2EventLogDir</name>
			<description>spark 2.* event log dir location</description>
		</property>
	</parameters>

	<global>
		<job-tracker>${jobTracker}</job-tracker>
		<name-node>${nameNode}</name-node>
		<configuration>
			<property>
				<name>mapreduce.job.queuename</name>
				<value>${queueName}</value>
			</property>
			<property>
				<name>oozie.launcher.mapred.job.queue.name</name>
				<value>${oozieLauncherQueueName}</value>
			</property>
			<property>
				<name>oozie.action.sharelib.for.spark</name>
				<value>${oozieActionShareLibForSpark2}</value>
			</property>

		</configuration>
	</global>
	<start to="deleteoutputpath"/>
	<kill name="Kill">
		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
	</kill>
	<action name="deleteoutputpath">
		<fs>
			<delete path="${outputPath}"/>
			<mkdir path="${outputPath}"/>
			<delete path="${workingDir}"/>
			<mkdir path="${workingDir}"/>
		</fs>
		<ok to="atomicactions"/>
		<error to="Kill"/>
	</action>


	<action name="atomicactions">
		<spark xmlns="uri:oozie:spark-action:0.2">
			<master>yarn</master>
			<mode>cluster</mode>
			<name>Produces the ActionSet for Person entity and relevant relations</name>
			<class>eu.dnetlib.dhp.actionmanager.personentity.ExtractPerson</class>
			<jar>dhp-aggregation-${projectVersion}.jar</jar>
			<spark-opts>
				--executor-cores=4
				--executor-memory=4G
				--driver-memory=${sparkDriverMemory}
				--conf spark.executor.memoryOverhead=5G
				--conf spark.extraListeners=${spark2ExtraListeners}
				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
				--conf spark.sql.shuffle.partitions=15000
			</spark-opts>
			<arg>--inputPath</arg><arg>${inputPath}</arg>
			<arg>--outputPath</arg><arg>${outputPath}</arg>
			<arg>--workingDir</arg><arg>${workingDir}</arg>
		</spark>
		<ok to="End"/>
		<error to="Kill"/>
	</action>

	<end name="End"/>
</workflow-app>
@@ -0,0 +1,20 @@
[
	{
		"paramName": "sp",
		"paramLongName": "sourcePath",
		"paramDescription": "the zipped opencitations file",
		"paramRequired": true
	},
	{
		"paramName": "op",
		"paramLongName": "outputPath",
		"paramDescription": "the working path",
		"paramRequired": true
	},
	{
		"paramName": "issm",
		"paramLongName": "isSparkSessionManaged",
		"paramDescription": "the hdfs name node",
		"paramRequired": false
	}
]
@@ -0,0 +1,30 @@
<configuration>
	<property>
		<name>jobTracker</name>
		<value>yarnRM</value>
	</property>
	<property>
		<name>nameNode</name>
		<value>hdfs://nameservice1</value>
	</property>
	<property>
		<name>oozie.use.system.libpath</name>
		<value>true</value>
	</property>
	<property>
		<name>hiveMetastoreUris</name>
		<value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
	</property>
	<property>
		<name>hiveJdbcUrl</name>
		<value>jdbc:hive2://iis-cdh5-test-m3.ocean.icm.edu.pl:10000</value>
	</property>
	<property>
		<name>hiveDbName</name>
		<value>openaire</value>
	</property>
	<property>
		<name>oozie.launcher.mapreduce.user.classpath.first</name>
		<value>true</value>
	</property>
</configuration>
@@ -0,0 +1,125 @@

<workflow-app name="SDG no doi" xmlns="uri:oozie:workflow:0.5">
	<parameters>
		<property>
			<name>sdgPath</name>
			<description>the input path of the resources to be extended</description>
		</property>
		<property>
			<name>outputPath</name>
			<description>the path where to store the actionset</description>
		</property>
		<property>
			<name>sparkDriverMemory</name>
			<description>memory for driver process</description>
		</property>
		<property>
			<name>sparkExecutorMemory</name>
			<description>memory for individual executor</description>
		</property>
		<property>
			<name>sparkExecutorCores</name>
			<description>number of cores used by single executor</description>
		</property>
		<property>
			<name>oozieActionShareLibForSpark2</name>
			<description>oozie action sharelib for spark 2.*</description>
		</property>
		<property>
			<name>spark2ExtraListeners</name>
			<value>com.cloudera.spark.lineage.NavigatorAppListener</value>
			<description>spark 2.* extra listeners classname</description>
		</property>
		<property>
			<name>spark2SqlQueryExecutionListeners</name>
			<value>com.cloudera.spark.lineage.NavigatorQueryListener</value>
			<description>spark 2.* sql query execution listeners classname</description>
		</property>
		<property>
			<name>spark2YarnHistoryServerAddress</name>
			<description>spark 2.* yarn history server address</description>
		</property>
		<property>
			<name>spark2EventLogDir</name>
			<description>spark 2.* event log dir location</description>
		</property>
	</parameters>

	<global>
		<job-tracker>${jobTracker}</job-tracker>
		<name-node>${nameNode}</name-node>
		<configuration>
			<property>
				<name>mapreduce.job.queuename</name>
				<value>${queueName}</value>
			</property>
			<property>
				<name>oozie.launcher.mapred.job.queue.name</name>
				<value>${oozieLauncherQueueName}</value>
			</property>
			<property>
				<name>oozie.action.sharelib.for.spark</name>
				<value>${oozieActionShareLibForSpark2}</value>
			</property>

		</configuration>
	</global>
	<start to="prepareSDG"/>

	<kill name="Kill">
		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
	</kill>

	<action name="prepareSDG">
		<spark xmlns="uri:oozie:spark-action:0.2">
			<master>yarn</master>
			<mode>cluster</mode>
			<name>Produces the results from SDG</name>
			<class>eu.dnetlib.dhp.actionmanager.createunresolvedentities.PrepareSDGSparkJob</class>
			<jar>dhp-aggregation-${projectVersion}.jar</jar>
			<spark-opts>
				--executor-memory=${sparkExecutorMemory}
				--executor-cores=${sparkExecutorCores}
				--driver-memory=${sparkDriverMemory}
				--conf spark.extraListeners=${spark2ExtraListeners}
				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
			</spark-opts>
			<arg>--sourcePath</arg><arg>${sdgPath}</arg>
			<arg>--outputPath</arg><arg>${workingDir}/prepared</arg>
			<arg>--distributeDoi</arg><arg>false</arg>
		</spark>
		<ok to="produceActionSet"/>
		<error to="Kill"/>
	</action>



	<action name="produceActionSet">
		<spark xmlns="uri:oozie:spark-action:0.2">
			<master>yarn</master>
			<mode>cluster</mode>
			<name>Save the action set grouping results with the same id</name>
			<class>eu.dnetlib.dhp.actionmanager.sdgnodoi.CreateActionSetSparkJob</class>
			<jar>dhp-aggregation-${projectVersion}.jar</jar>
			<spark-opts>
				--executor-memory=${sparkExecutorMemory}
				--executor-cores=${sparkExecutorCores}
				--driver-memory=${sparkDriverMemory}
				--conf spark.extraListeners=${spark2ExtraListeners}
				--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
				--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
				--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
			</spark-opts>
			<arg>--sourcePath</arg><arg>${workingDir}/prepared/sdg</arg>
			<arg>--outputPath</arg><arg>${outputPath}</arg>
		</spark>
		<ok to="End"/>
		<error to="Kill"/>
	</action>

	<end name="End"/>
</workflow-app>
@@ -1,3 +1,11 @@
-sourcePath=/user/miriam.baglioni/openalex-snapshot/data/works/
-outputPath=/tmp/miriam/webcrawlComplete/
-blackListPath=/user/miriam.baglioni/openalex-blackList
+#PROPERTIES TO CREATE THE ACTION SET
+#sourcePath=/user/miriam.baglioni/openalex-snapshot/data/works/
+#outputPath=/tmp/miriam/webcrawlComplete/
+#blackListPath=/user/miriam.baglioni/openalex-blackList
+#resumeFrom=create
+
+#PROPERTIES TO REMOVE FROM THE ACTION SET
+sourcePath=/var/lib/dnet/actionManager_PROD/webcrawl/rawset_28247629-468b-478e-9a42-bc540877125d_1718121542061/
+outputPath=/tmp/miriam/webcrawlRemoved/
+blackListPath=/user/miriam.baglioni/oalexBlackListNormalized
+resumeFrom=remove
@@ -20,12 +20,19 @@
         </configuration>
     </global>
 
-    <start to="create_actionset"/>
+    <start to="resumeFrom"/>
 
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>
 
+    <decision name="resumeFrom">
+        <switch>
+            <case to="create_actionset">${wf:conf('resumeFrom') eq 'create'}</case>
+            <default to="remove_from_actionset"/>
+        </switch>
+    </decision>
+
     <action name="create_actionset">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>

@@ -50,5 +57,30 @@
             <ok to="End"/>
             <error to="Kill"/>
         </action>
 
+        <action name="remove_from_actionset">
+            <spark xmlns="uri:oozie:spark-action:0.2">
+                <master>yarn</master>
+                <mode>cluster</mode>
+                <name>Removes some relations found to be wrong from the AS</name>
+                <class>eu.dnetlib.dhp.actionmanager.webcrawl.RemoveRelationFromActionSet</class>
+                <jar>dhp-aggregation-${projectVersion}.jar</jar>
+                <spark-opts>
+                    --executor-memory=${sparkExecutorMemory}
+                    --executor-cores=${sparkExecutorCores}
+                    --driver-memory=${sparkDriverMemory}
+                    --conf spark.extraListeners=${spark2ExtraListeners}
+                    --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+                    --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+                    --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+                    --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+                </spark-opts>
+                <arg>--sourcePath</arg><arg>${sourcePath}</arg>
+                <arg>--outputPath</arg><arg>${outputPath}</arg>
+                <arg>--blackListPath</arg><arg>${blackListPath}</arg>
+            </spark>
+            <ok to="End"/>
+            <error to="Kill"/>
+        </action>
+
     <end name="End"/>
 </workflow-app>
@@ -518,7 +518,9 @@ case object Crossref2Oaf {
   }
 
   if (doi.startsWith("10.3410") || doi.startsWith("10.12703"))
-    instance.setHostedby(OafMapperUtils.keyValue(OafMapperUtils.createOpenaireId(10, "openaire____::H1Connect", true),"H1Connect"))
+    instance.setHostedby(
+      OafMapperUtils.keyValue(OafMapperUtils.createOpenaireId(10, "openaire____::H1Connect", true), "H1Connect")
+    )
 
   instance.setAccessright(
     decideAccessRight(instance.getLicense, result.getDateofacceptance.getValue)

@@ -904,7 +906,11 @@ case object Crossref2Oaf {
   val targetId = getProjectId("cihr________", "1e5e62235d094afd01cd56e65112fc63")
   queue += generateRelation(sourceId, targetId, ModelConstants.IS_PRODUCED_BY)
   queue += generateRelation(targetId, sourceId, ModelConstants.PRODUCES)
+// Added mapping for DFG
+case "10.13039/501100001659" =>
+  val targetId = getProjectId("dfgf________", "1e5e62235d094afd01cd56e65112fc63")
+  queue += generateRelation(sourceId, targetId, ModelConstants.IS_PRODUCED_BY)
+  queue += generateRelation(targetId, sourceId, ModelConstants.PRODUCES)
 case "10.13039/100020031" =>
   val targetId = getProjectId("tara________", "1e5e62235d094afd01cd56e65112fc63")
   queue += generateRelation(sourceId, targetId, ModelConstants.IS_PRODUCED_BY)

@@ -1041,6 +1047,7 @@ case object Crossref2Oaf {
   tp._1 match {
     case "electronic" => journal.setIssnOnline(tp._2)
     case "print" => journal.setIssnPrinted(tp._2)
+    case _ =>
   }
 })
 }
@@ -2,12 +2,9 @@ package eu.dnetlib.dhp.sx.bio.ebi
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser
 import eu.dnetlib.dhp.collection.CollectionUtils
-import eu.dnetlib.dhp.common.Constants.{MDSTORE_DATA_PATH, MDSTORE_SIZE_PATH}
 import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup
-import eu.dnetlib.dhp.schema.mdstore.MDStoreVersion
-import eu.dnetlib.dhp.schema.oaf.{Oaf, Result}
+import eu.dnetlib.dhp.schema.oaf.Oaf
 import eu.dnetlib.dhp.sx.bio.pubmed._
-import eu.dnetlib.dhp.utils.DHPUtils.{MAPPER, writeHdfsFile}
 import eu.dnetlib.dhp.utils.ISLookupClientFactory
 import org.apache.commons.io.IOUtils
 import org.apache.hadoop.conf.Configuration

@@ -17,13 +14,13 @@ import org.apache.http.client.methods.HttpGet
 import org.apache.http.impl.client.HttpClientBuilder
 import org.apache.spark.SparkConf
 import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.expressions.Aggregator
 import org.apache.spark.sql._
+import org.apache.spark.sql.expressions.Aggregator
 import org.slf4j.{Logger, LoggerFactory}
 
-import java.io.InputStream
-import scala.io.Source
-import scala.xml.pull.XMLEventReader
+import java.io.{ByteArrayInputStream, InputStream}
+import java.nio.charset.Charset
+import javax.xml.stream.XMLInputFactory
 
 object SparkCreateBaselineDataFrame {
 

@@ -86,7 +83,7 @@ object SparkCreateBaselineDataFrame {
       if (response.getStatusLine.getStatusCode > 400) {
         tries -= 1
       } else
-        return IOUtils.toString(response.getEntity.getContent)
+        return IOUtils.toString(response.getEntity.getContent, Charset.defaultCharset())
     } catch {
       case e: Throwable =>
         println(s"Error on requesting ${r.getURI}")

@@ -158,7 +155,8 @@ object SparkCreateBaselineDataFrame {
       IOUtils.toString(
         SparkEBILinksToOaf.getClass.getResourceAsStream(
           "/eu/dnetlib/dhp/sx/bio/ebi/baseline_to_oaf_params.json"
-        )
+        ),
+        Charset.defaultCharset()
       )
     )
     parser.parseArgument(args)

@@ -167,15 +165,11 @@ object SparkCreateBaselineDataFrame {
     val workingPath = parser.get("workingPath")
     log.info("workingPath: {}", workingPath)
 
-    val mdstoreOutputVersion = parser.get("mdstoreOutputVersion")
-    log.info("mdstoreOutputVersion: {}", mdstoreOutputVersion)
-
-    val cleanedMdStoreVersion = MAPPER.readValue(mdstoreOutputVersion, classOf[MDStoreVersion])
-    val outputBasePath = cleanedMdStoreVersion.getHdfsPath
-    log.info("outputBasePath: {}", outputBasePath)
+    val targetPath = parser.get("targetPath")
+    log.info("targetPath: {}", targetPath)
 
     val hdfsServerUri = parser.get("hdfsServerUri")
-    log.info("hdfsServerUri: {}", hdfsServerUri)
+    log.info("hdfsServerUri: {}", targetPath)
 
     val skipUpdate = parser.get("skipUpdate")
     log.info("skipUpdate: {}", skipUpdate)

@@ -201,10 +195,11 @@ object SparkCreateBaselineDataFrame {
     if (!"true".equalsIgnoreCase(skipUpdate)) {
       downloadBaseLineUpdate(s"$workingPath/baseline", hdfsServerUri)
       val k: RDD[(String, String)] = sc.wholeTextFiles(s"$workingPath/baseline", 2000)
+      val inputFactory = XMLInputFactory.newInstance
       val ds: Dataset[PMArticle] = spark.createDataset(
         k.filter(i => i._1.endsWith(".gz"))
           .flatMap(i => {
-            val xml = new XMLEventReader(Source.fromBytes(i._2.getBytes()))
+            val xml = inputFactory.createXMLEventReader(new ByteArrayInputStream(i._2.getBytes()))
            new PMParser(xml)
           })
       )

@@ -223,11 +218,8 @@ object SparkCreateBaselineDataFrame {
         .map(a => PubMedToOaf.convert(a, vocabularies))
         .as[Oaf]
         .filter(p => p != null),
-      s"$outputBasePath/$MDSTORE_DATA_PATH"
+      targetPath
     )
 
-    val df = spark.read.text(s"$outputBasePath/$MDSTORE_DATA_PATH")
-    val mdStoreSize = df.count
-    writeHdfsFile(spark.sparkContext.hadoopConfiguration, s"$mdStoreSize", s"$outputBasePath/$MDSTORE_SIZE_PATH")
   }
 }
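
The XMLEventReader changes above, and the PMParser and BioScholixTest changes further down, all follow the same pattern: the scala.xml.pull event reader, which was dropped from scala-xml, is replaced by the JDK's built-in StAX API. A minimal, self-contained sketch of the new pattern follows; the object name and input are illustrative only and not part of the patch (PMParser, the project's consumer of the event stream, is not reproduced here):

import java.io.ByteArrayInputStream
import javax.xml.stream.{XMLEventReader, XMLInputFactory}

// Sketch of the StAX-based event reading adopted by this commit.
object StaxSketch {
  def eventReader(xml: String): XMLEventReader = {
    val inputFactory = XMLInputFactory.newInstance
    // createXMLEventReader accepts an InputStream, which is why the Spark job
    // above wraps the wholeTextFiles content in a ByteArrayInputStream
    inputFactory.createXMLEventReader(new ByteArrayInputStream(xml.getBytes))
  }

  def main(args: Array[String]): Unit = {
    val reader = eventReader("<articles><a>one</a></articles>")
    while (reader.hasNext) println(reader.nextEvent())
  }
}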
@@ -1,7 +1,8 @@
 package eu.dnetlib.dhp.sx.bio.pubmed
 
 import scala.xml.MetaData
-import scala.xml.pull.{EvElemEnd, EvElemStart, EvText, XMLEventReader}
+import javax.xml.stream.XMLEventReader
+import scala.xml.pull.{EvElemEnd, EvElemStart, EvText}
 
 /** @param xml
   */
@@ -15,10 +15,7 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.sql.SparkSession;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.*;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -119,7 +119,9 @@ public class ReadCOCITest {
                     workingDir.toString() + "/COCI",
                     "-outputPath",
                     workingDir.toString() + "/COCI_json/",
-                    "-inputFile", "input1;input2;input3;input4;input5"
+                    "-inputFile", "input1;input2;input3;input4;input5",
+                    "-format",
+                    "COCI"
                 });
 
         final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
@@ -77,13 +77,13 @@ public class RemapTest {
         MapOCIdsInPids
             .main(
                 new String[] {
-                    "-isSparkSessionManged",
+                    "--isSparkSessionManged",
                     Boolean.FALSE.toString(),
-                    "-inputPath",
+                    "--inputPath",
                     inputPath,
-                    "-outputPath",
+                    "--outputPath",
                     workingDir.toString() + "/out/",
-                    "-nameNode", "input1;input2;input3;input4;input5"
+                    "--nameNode", "hdfs://localhost"
                 });
 
     }
@@ -0,0 +1,213 @@
+
+package eu.dnetlib.dhp.actionmanager.person;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.io.Text;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.actionmanager.personentity.ExtractPerson;
+import eu.dnetlib.dhp.schema.action.AtomicAction;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.oaf.Person;
+import eu.dnetlib.dhp.schema.oaf.Relation;
+import eu.dnetlib.dhp.utils.DHPUtils;
+
+public class CreatePersonAS {
+
+    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+    private static SparkSession spark;
+
+    private static Path workingDir;
+    private static final Logger log = LoggerFactory
+        .getLogger(CreatePersonAS.class);
+
+    @BeforeAll
+    public static void beforeAll() throws IOException {
+        workingDir = Files
+            .createTempDirectory(CreatePersonAS.class.getSimpleName());
+        log.info("using work dir {}", workingDir);
+
+        SparkConf conf = new SparkConf();
+        conf.setAppName(CreatePersonAS.class.getSimpleName());
+
+        conf.setMaster("local[*]");
+        conf.set("spark.driver.host", "localhost");
+        conf.set("hive.metastore.local", "true");
+        conf.set("spark.ui.enabled", "false");
+        conf.set("spark.sql.codegen.wholeStage", "false");
+        conf.set("spark.sql.warehouse.dir", workingDir.toString());
+        conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
+
+        spark = SparkSession
+            .builder()
+            .appName(CreatePersonAS.class.getSimpleName())
+            .config(conf)
+            .getOrCreate();
+    }
+
+    @AfterAll
+    public static void afterAll() throws IOException {
+        FileUtils.deleteDirectory(workingDir.toFile());
+        spark.stop();
+    }
+
+    @Test
+    void testAuthors() throws Exception {
+
+        String inputPath = getClass()
+            .getResource(
+                "/eu/dnetlib/dhp/actionmanager/person/")
+            .getPath();
+
+//        spark
+//            .read()
+//            .parquet(inputPath + "Authors")
+//            .as(Encoders.bean(Author.class))
+//            .filter((FilterFunction<Author>) a -> Optional.ofNullable(a.getOtherNames()).isPresent() &&
+//                Optional.ofNullable(a.getBiography()).isPresent())
+//            .write()
+//            .mode(SaveMode.Overwrite)
+//            .parquet(workingDir.toString() + "AuthorsSubset");
+
+        ExtractPerson
+            .main(
+                new String[] {
+                    "-isSparkSessionManaged",
+                    Boolean.FALSE.toString(),
+                    "-inputPath",
+                    inputPath,
+                    "-outputPath",
+                    workingDir.toString() + "/actionSet1",
+                    "-workingDir",
+                    workingDir.toString() + "/working"
+                });
+
+        final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+
+        JavaRDD<Relation> relations = sc
+            .sequenceFile(workingDir.toString() + "/actionSet1", Text.class, Text.class)
+            .filter(v -> "eu.dnetlib.dhp.schema.oaf.Relation".equalsIgnoreCase(v._1().toString()))
+            .map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
+            .map(aa -> ((Relation) aa.getPayload()));
+//
+        JavaRDD<Person> people = sc
+            .sequenceFile(workingDir.toString() + "/actionSet1", Text.class, Text.class)
+            .filter(v -> "eu.dnetlib.dhp.schema.oaf.Person".equalsIgnoreCase(v._1().toString()))
+            .map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
+            .map(aa -> ((Person) aa.getPayload()));
+//
+        Assertions.assertEquals(7, people.count());
+        Assertions
+            .assertEquals(
+                "Paulo",
+                people
+                    .filter(
+                        p -> p.getPid().stream().anyMatch(id -> id.getValue().equalsIgnoreCase("0000-0002-3210-3034")))
+                    .first()
+                    .getGivenName());
+        Assertions
+            .assertEquals(
+                "Tavares",
+                people
+                    .filter(
+                        p -> p.getPid().stream().anyMatch(id -> id.getValue().equalsIgnoreCase("0000-0002-3210-3034")))
+                    .first()
+                    .getFamilyName());
+        Assertions
+            .assertEquals(
+                4,
+                people
+                    .filter(
+                        p -> p.getPid().stream().anyMatch(id -> id.getValue().equalsIgnoreCase("0000-0002-3210-3034")))
+                    .first()
+                    .getAlternativeNames()
+                    .size());
+        Assertions
+            .assertEquals(
+                4,
+                people
+                    .filter(
+                        p -> p.getPid().stream().anyMatch(id -> id.getValue().equalsIgnoreCase("0000-0002-3210-3034")))
+                    .first()
+                    .getPid()
+                    .size());
+        Assertions
+            .assertTrue(
+                people
+                    .filter(
+                        p -> p.getPid().stream().anyMatch(id -> id.getValue().equalsIgnoreCase("0000-0002-3210-3034")))
+                    .first()
+                    .getPid()
+                    .stream()
+                    .anyMatch(
+                        p -> p.getQualifier().getSchemename().equalsIgnoreCase("Scopus Author ID")
+                            && p.getValue().equalsIgnoreCase("15119405200")));
+
+        Assertions
+            .assertEquals(
+                16,
+                relations
+                    .filter(r -> r.getRelClass().equalsIgnoreCase(ModelConstants.RESULT_PERSON_HASAUTHORED))
+                    .count());
+        Assertions
+            .assertEquals(
+                14,
+                relations
+                    .filter(r -> r.getRelClass().equalsIgnoreCase(ModelConstants.PERSON_PERSON_HASCOAUTHORED))
+                    .count());
+        Assertions
+            .assertEquals(
+                3,
+                relations
+                    .filter(
+                        r -> r.getSource().equalsIgnoreCase("30|orcid_______::" + DHPUtils.md5("0000-0001-6291-9619"))
+                            && r.getRelClass().equalsIgnoreCase(ModelConstants.RESULT_PERSON_HASAUTHORED))
+                    .count());
+        Assertions
+            .assertEquals(
+                2,
+                relations
+                    .filter(
+                        r -> r.getSource().equalsIgnoreCase("30|orcid_______::" + DHPUtils.md5("0000-0001-6291-9619"))
+                            && r.getRelClass().equalsIgnoreCase(ModelConstants.RESULT_PERSON_HASAUTHORED)
+                            && r.getTarget().startsWith("50|doi"))
+                    .count());
+        Assertions
+            .assertEquals(
+                1,
+                relations
+                    .filter(
+                        r -> r.getSource().equalsIgnoreCase("30|orcid_______::" + DHPUtils.md5("0000-0001-6291-9619"))
+                            && r.getRelClass().equalsIgnoreCase(ModelConstants.RESULT_PERSON_HASAUTHORED)
+                            && r.getTarget().startsWith("50|arXiv"))
+                    .count());
+
+        Assertions
+            .assertEquals(
+                1,
+                relations
+                    .filter(
+                        r -> r.getSource().equalsIgnoreCase("30|orcid_______::" + DHPUtils.md5("0000-0001-6291-9619"))
+                            && r.getRelClass().equalsIgnoreCase(ModelConstants.PERSON_PERSON_HASCOAUTHORED))
+                    .count());
+        Assertions.assertEquals(33, relations.count());
+
+    }
+
+}
@@ -2,6 +2,7 @@
 package eu.dnetlib.dhp.actionmanager.webcrawl;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import java.io.IOException;
 import java.nio.file.Files;

@@ -101,7 +102,10 @@ public class CreateASTest {
             .map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
             .map(aa -> ((Relation) aa.getPayload()));
 
-        Assertions.assertEquals(58, tmp.count());
+        tmp.foreach(r -> System.out.println(new ObjectMapper().writeValueAsString(r)));
+        tmp.foreach(r -> assertTrue(r.getSource().startsWith("20|ror") || r.getSource().startsWith("50|doi")));
+        tmp.foreach(r -> assertTrue(r.getTarget().startsWith("20|ror") || r.getTarget().startsWith("50|doi")));
+        Assertions.assertEquals(24, tmp.count());
 
     }
 

@@ -112,7 +116,7 @@ public class CreateASTest {
 
         String inputPath = getClass()
             .getResource(
-                "/eu/dnetlib/dhp/actionmanager/webcrawl/")
+                "/eu/dnetlib/dhp/actionmanager/webcrawl/input/")
             .getPath();
         String blackListPath = getClass()
             .getResource(

@@ -194,7 +198,7 @@ public class CreateASTest {
 
         Assertions
             .assertEquals(
-                2, tmp
+                1, tmp
                     .filter(
                         r -> r
                             .getSource()

@@ -207,7 +211,7 @@ public class CreateASTest {
 
         Assertions
             .assertEquals(
-                2, tmp
+                1, tmp
                     .filter(
                         r -> r
                             .getTarget()

@@ -228,13 +232,13 @@ public class CreateASTest {
                                 "20|ror_________::" + IdentifierFactory
                                     .md5(
                                         PidCleaner
-                                            .normalizePidValue(PidType.doi.toString(), "https://ror.org/03265fv13")))
+                                            .normalizePidValue("ROR", "https://ror.org/03265fv13")))
                             && r.getSource().startsWith("50|doi"))
                     .count());
 
         Assertions
             .assertEquals(
-                1, tmp
+                0, tmp
                     .filter(
                         r -> r
                             .getTarget()

@@ -268,6 +272,10 @@ public class CreateASTest {
             .getResource(
                 "/eu/dnetlib/dhp/actionmanager/webcrawl")
             .getPath();
+        String blackListPath = getClass()
+            .getResource(
+                "/eu/dnetlib/dhp/actionmanager/webcrawl/blackList/")
+            .getPath();
 
         CreateActionSetFromWebEntries
             .main(

@@ -277,7 +285,8 @@ public class CreateASTest {
                     "-sourcePath",
                     inputPath,
                     "-outputPath",
-                    workingDir.toString() + "/actionSet1"
+                    workingDir.toString() + "/actionSet1",
+                    "-blackListPath", blackListPath
                 });
 
         final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
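
The normalizePidValue change above feeds the organization identifiers these assertions compare against. A minimal sketch of that id construction in Scala: the "20|ror_________::" prefix and the ROR value are taken from the test itself, while the normalization and hashing shown here are assumptions standing in for PidCleaner.normalizePidValue and IdentifierFactory.md5:

import java.security.MessageDigest

object RorIdSketch {
  // assumed to behave like IdentifierFactory.md5: lowercase hex MD5
  def md5(s: String): String =
    MessageDigest.getInstance("MD5").digest(s.getBytes("UTF-8")).map("%02x".format(_)).mkString

  // assumption: normalization strips the resolver prefix from the ROR URL
  def normalizeRor(value: String): String =
    value.replaceAll("^https?://ror\\.org/", "")

  def main(args: Array[String]): Unit =
    println("20|ror_________::" + md5(normalizeRor("https://ror.org/03265fv13")))
}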
@@ -0,0 +1,108 @@
+
+package eu.dnetlib.dhp.actionmanager.webcrawl;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.io.Text;
+import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaRDD;
+import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.SparkSession;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.schema.action.AtomicAction;
+import eu.dnetlib.dhp.schema.oaf.Relation;
+import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
+import eu.dnetlib.dhp.schema.oaf.utils.PidCleaner;
+import eu.dnetlib.dhp.schema.oaf.utils.PidType;
+
+/**
+ * @author miriam.baglioni
+ * @Date 22/04/24
+ */
+public class RemoveFromASTest {
+    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
+
+    private static SparkSession spark;
+
+    private static Path workingDir;
+    private static final Logger log = LoggerFactory
+        .getLogger(RemoveFromASTest.class);
+
+    @BeforeAll
+    public static void beforeAll() throws IOException {
+        workingDir = Files
+            .createTempDirectory(RemoveFromASTest.class.getSimpleName());
+        log.info("using work dir {}", workingDir);
+
+        SparkConf conf = new SparkConf();
+        conf.setAppName(RemoveFromASTest.class.getSimpleName());
+
+        conf.setMaster("local[*]");
+        conf.set("spark.driver.host", "localhost");
+        conf.set("hive.metastore.local", "true");
+        conf.set("spark.ui.enabled", "false");
+        conf.set("spark.sql.warehouse.dir", workingDir.toString());
+        conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());
+
+        spark = SparkSession
+            .builder()
+            .appName(RemoveFromASTest.class.getSimpleName())
+            .config(conf)
+            .getOrCreate();
+    }
+
+    @AfterAll
+    public static void afterAll() throws IOException {
+        FileUtils.deleteDirectory(workingDir.toFile());
+        spark.stop();
+    }
+
+    @Test
+    void testNumberofRelations() throws Exception {
+
+        String inputPath = getClass()
+            .getResource(
+                "/eu/dnetlib/dhp/actionmanager/webcrawl/actionSet/")
+            .getPath();
+        String blackListPath = getClass()
+            .getResource(
+                "/eu/dnetlib/dhp/actionmanager/webcrawl/blackListRemove/")
+            .getPath();
+
+        RemoveRelationFromActionSet
+            .main(
+                new String[] {
+                    "-isSparkSessionManaged",
+                    Boolean.FALSE.toString(),
+                    "-sourcePath",
+                    inputPath,
+                    "-outputPath",
+                    workingDir.toString() + "/actionSet1",
+                    "-blackListPath", blackListPath
+                });
+
+        final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
+
+        JavaRDD<Relation> tmp = sc
+            .sequenceFile(workingDir.toString() + "/actionSet1", Text.class, Text.class)
+            .map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
+            .map(aa -> ((Relation) aa.getPayload()));
+
+        Assertions.assertEquals(22, tmp.count());
+
+    }
+
+}
@@ -0,0 +1,64 @@
+
+package eu.dnetlib.dhp.collection.plugin.file;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Objects;
+import java.util.stream.Stream;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.junit.jupiter.api.*;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import eu.dnetlib.dhp.collection.ApiDescriptor;
+import eu.dnetlib.dhp.common.aggregation.AggregatorReport;
+import eu.dnetlib.dhp.common.collection.CollectorException;
+
+@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
+@ExtendWith(MockitoExtension.class)
+public class FileGZipMultipleNodeTest {
+
+    private static final Logger log = LoggerFactory.getLogger(FileGZipCollectorPluginTest.class);
+
+    private final ApiDescriptor api = new ApiDescriptor();
+
+    private FileGZipCollectorPlugin plugin;
+
+    private static final String SPLIT_ON_ELEMENT = "incollection,article";
+
+    @BeforeEach
+    public void setUp() throws IOException {
+
+        final String gzipFile = Objects
+            .requireNonNull(
+                this
+                    .getClass()
+                    .getResource("/eu/dnetlib/dhp/collection/plugin/file/dblp.gz"))
+            .getFile();
+
+        api.setBaseUrl(gzipFile);
+
+        HashMap<String, String> params = new HashMap<>();
+        params.put("splitOnElement", SPLIT_ON_ELEMENT);
+
+        api.setParams(params);
+
+        FileSystem fs = FileSystem.get(new Configuration());
+        plugin = new FileGZipCollectorPlugin(fs);
+    }
+
+    @Test
+    void test() throws CollectorException {
+
+        final Stream<String> stream = plugin.collect(api, new AggregatorReport());
+
+        stream.limit(10).forEach(s -> {
+            Assertions.assertTrue(s.length() > 0);
+            log.info(s);
+        });
+    }
+}
@@ -0,0 +1,103 @@
+
+package eu.dnetlib.dhp.collection.plugin.gtr2;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+import java.util.Iterator;
+
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+
+import eu.dnetlib.dhp.common.collection.HttpClientParams;
+
+class Gtr2PublicationsIteratorTest {
+
+    private static final String baseURL = "https://gtr.ukri.org/gtr/api";
+
+    private static final HttpClientParams clientParams = new HttpClientParams();
+
+    @Test
+    @Disabled
+    public void testOne() throws Exception {
+        System.out.println("one publication");
+
+        final Iterator<String> iterator = new Gtr2PublicationsIterator(baseURL, null, null, null, clientParams);
+
+        if (iterator.hasNext()) {
+            final String res = iterator.next();
+            assertNotNull(res);
+            System.out.println(res);
+        }
+    }
+
+    @Test
+    @Disabled
+    public void testPaging() throws Exception {
+        final Iterator<String> iterator = new Gtr2PublicationsIterator(baseURL, null, "2", "2", clientParams);
+
+        while (iterator.hasNext()) {
+            Thread.sleep(300);
+            final String res = iterator.next();
+            assertNotNull(res);
+            System.out.println(res);
+        }
+    }
+
+    @Test
+    @Disabled
+    public void testOnePage() throws Exception {
+        final Iterator<String> iterator = new Gtr2PublicationsIterator(baseURL, null, "12", "12", clientParams);
+        final int count = iterateAndCount(iterator);
+        assertEquals(20, count);
+    }
+
+    @Test
+    @Disabled
+    public void testIncrementalHarvestingNoRecords() throws Exception {
+        System.out.println("incremental Harvesting");
+        final Iterator<String> iterator = new Gtr2PublicationsIterator(baseURL, "2050-12-12T", "11", "13",
+            clientParams);
+        final int count = iterateAndCount(iterator);
+        assertEquals(0, count);
+    }
+
+    @Test
+    @Disabled
+    public void testIncrementalHarvesting() throws Exception {
+        System.out.println("incremental Harvesting");
+        final Iterator<String> iterator = new Gtr2PublicationsIterator(baseURL, "2016-11-30", "11", "11", clientParams);
+        final int count = iterateAndCount(iterator);
+        assertEquals(20, count);
+    }
+
+    @Test
+    @Disabled
+    public void testCompleteHarvesting() throws Exception {
+        System.out.println("testing complete harvesting");
+        final Iterator<String> iterator = new Gtr2PublicationsIterator(baseURL, null, null, null, clientParams);
+        // TryIndentXmlString indenter = new TryIndentXmlString();
+        // it.setEndAtPage(3);
+
+        while (iterator.hasNext()) {
+            final String res = iterator.next();
+            assertNotNull(res);
+            // System.out.println(res);
+            // Scanner keyboard = new Scanner(System.in);
+            // System.out.println("press enter for next record");
+            // keyboard.nextLine();
+
+        }
+    }
+
+    private int iterateAndCount(final Iterator<String> iterator) throws Exception {
+        int i = 0;
+        while (iterator.hasNext()) {
+            assertNotNull(iterator.next());
+            i++;
+        }
+        System.out.println("Got " + i + " publications");
+        return i;
+    }
+
+}
@@ -0,0 +1,122 @@
+
+package eu.dnetlib.dhp.collection.plugin.osf;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+
+import java.util.HashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.stream.Stream;
+
+import org.dom4j.DocumentHelper;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import eu.dnetlib.dhp.collection.ApiDescriptor;
+import eu.dnetlib.dhp.collection.plugin.utils.JsonUtils;
+import eu.dnetlib.dhp.common.aggregation.AggregatorReport;
+import eu.dnetlib.dhp.common.collection.CollectorException;
+import eu.dnetlib.dhp.common.collection.HttpClientParams;
+import eu.dnetlib.dhp.common.collection.HttpConnector2;
+
+public class OsfPreprintsCollectorPluginTest {
+
+    private static final Logger log = LoggerFactory.getLogger(OsfPreprintsCollectorPlugin.class);
+
+    private final String baseUrl = "https://api.osf.io/v2/preprints/";
+
+    private final int pageSize = 100;
+
+    private final ApiDescriptor api = new ApiDescriptor();
+
+    private OsfPreprintsCollectorPlugin plugin;
+
+    @BeforeEach
+    public void setUp() {
+        final HashMap<String, String> params = new HashMap<>();
+        params.put("pageSize", "" + this.pageSize);
+
+        this.api.setBaseUrl(this.baseUrl);
+        this.api.setParams(params);
+
+        this.plugin = new OsfPreprintsCollectorPlugin(new HttpClientParams());
+    }
+
+    @Test
+    @Disabled
+    void test_one() throws CollectorException {
+        this.plugin
+            .collect(this.api, new AggregatorReport())
+            .limit(1)
+            .forEach(log::info);
+    }
+
+    @Test
+    @Disabled
+    void test_limited() throws CollectorException {
+        final AtomicInteger i = new AtomicInteger(0);
+        final Stream<String> stream = this.plugin.collect(this.api, new AggregatorReport());
+
+        stream.limit(2000).forEach(s -> {
+            Assertions.assertTrue(s.length() > 0);
+            i.incrementAndGet();
+            log.info(s);
+        });
+
+        log.info("{}", i.intValue());
+        Assertions.assertTrue(i.intValue() > 0);
+    }
+
+    @Test
+    @Disabled
+    void test_all() throws CollectorException {
+        final AtomicLong i = new AtomicLong(0);
+        final Stream<String> stream = this.plugin.collect(this.api, new AggregatorReport());
+
+        stream.forEach(s -> {
+            Assertions.assertTrue(s.length() > 0);
+            if ((i.incrementAndGet() % 1000) == 0) {
+                log.info("COLLECTED: {}", i.get());
+            }
+
+        });
+
+        log.info("TOTAL: {}", i.get());
+        Assertions.assertTrue(i.get() > 0);
+    }
+
+    @Test
+    @Disabled
+    void test_authentication_required() {
+        final HttpConnector2 connector = new HttpConnector2();
+
+        try {
+            final String res = connector
+                .getInputSource("https://api.osf.io/v2/preprints/ydtzx/contributors/?format=json");
+            System.out.println(res);
+            fail();
+        } catch (final Throwable e) {
+
+            System.out.println("**** ERROR: " + e.getMessage());
+
+            if ((e instanceof CollectorException) && e.getMessage().contains("401")) {
+                System.out.println(" XML: " + DocumentHelper.createDocument().getRootElement().detach());
+            }
+
+            assertTrue(e.getMessage().contains("401"));
+        }
+
+    }
+
+    @Test
+    void testXML() {
+        final String xml = JsonUtils.convertToXML("{'next':null}");
+        System.out.println(xml);
+    }
+
+}
@@ -0,0 +1,58 @@
+
+package eu.dnetlib.dhp.collection.plugin.researchfi;
+
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.dom4j.DocumentException;
+import org.dom4j.DocumentHelper;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+
+import eu.dnetlib.dhp.collection.ApiDescriptor;
+import eu.dnetlib.dhp.common.aggregation.AggregatorReport;
+import eu.dnetlib.dhp.common.collection.CollectorException;
+
+public class ResearchFiCollectorPluginTest {
+
+    private final ResearchFiCollectorPlugin plugin = new ResearchFiCollectorPlugin();
+
+    @Test
+    @Disabled
+    void testCollect() throws CollectorException {
+        final ApiDescriptor api = new ApiDescriptor();
+        api.setBaseUrl("https://research.fi/api/rest/v1/funding-decisions?FunderName=AKA&FundingStartYearFrom=2022");
+        api.setProtocol("research_fi");
+        api
+            .getParams()
+            .put("auth_url", "https://researchfi-auth.2.rahtiapp.fi/realms/publicapi/protocol/openid-connect/token");
+        api.getParams().put("auth_client_id", "");
+        api.getParams().put("auth_client_secret", "");
+
+        final AtomicLong count = new AtomicLong(0);
+        final Set<String> ids = new HashSet<>();
+
+        this.plugin.collect(api, new AggregatorReport()).forEach(s -> {
+
+            if (count.getAndIncrement() == 0) {
+                System.out.println("First: " + s);
+            }
+
+            try {
+                final String id = DocumentHelper.parseText(s).valueOf("/recordWrap/funderProjectNumber");
+                if (ids.contains(id)) {
+                    System.out.println("Id already present: " + id);
+                }
+                ids.add(id);
+            } catch (final DocumentException e) {
+                throw new RuntimeException(e);
+            }
+        });
+
+        System.out.println("Total records: " + count);
+        System.out.println("Total identifiers: " + ids.size());
+
+    }
+
+}
@@ -1,105 +0,0 @@
-
-package eu.dnetlib.dhp.collection.plugin.rest;
-
-import java.util.HashMap;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.stream.Stream;
-
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import eu.dnetlib.dhp.collection.ApiDescriptor;
-import eu.dnetlib.dhp.common.aggregation.AggregatorReport;
-import eu.dnetlib.dhp.common.collection.CollectorException;
-import eu.dnetlib.dhp.common.collection.HttpClientParams;
-
-public class OsfPreprintCollectorTest {
-
-    private static final Logger log = LoggerFactory.getLogger(OsfPreprintCollectorTest.class);
-
-    private final String baseUrl = "https://api.osf.io/v2/preprints/";
-
-    // private final String requestHeaderMap = "";
-    // private final String authMethod = "";
-    // private final String authToken = "";
-    // private final String resultOutputFormat = "";
-
-    private final String queryParams = "filter:is_published:d=true";
-
-    private final String entityXpath = "/*/*[local-name()='data']";
-
-    private final String resultTotalXpath = "/*/*[local-name()='links']/*[local-name()='meta']/*[local-name()='total']";
-
-    private final String resumptionParam = "page";
-    private final String resumptionType = "page";
-    private final String resumptionXpath = "/*/*[local-name()='links']/*[local-name()='next']";
-
-    private final String resultSizeParam = "page[size]";
-    private final String resultSizeValue = "100";
-
-    private final String resultFormatParam = "format";
-    private final String resultFormatValue = "json";
-
-    private final ApiDescriptor api = new ApiDescriptor();
-    private RestCollectorPlugin rcp;
-
-    @BeforeEach
-    public void setUp() {
-        final HashMap<String, String> params = new HashMap<>();
-        params.put("resumptionType", this.resumptionType);
-        params.put("resumptionParam", this.resumptionParam);
-        params.put("resumptionXpath", this.resumptionXpath);
-        params.put("resultTotalXpath", this.resultTotalXpath);
-        params.put("resultFormatParam", this.resultFormatParam);
-        params.put("resultFormatValue", this.resultFormatValue);
-        params.put("resultSizeParam", this.resultSizeParam);
-        params.put("resultSizeValue", this.resultSizeValue);
-        params.put("queryParams", this.queryParams);
-        params.put("entityXpath", this.entityXpath);
-
-        this.api.setBaseUrl(this.baseUrl);
-        this.api.setParams(params);
-
-        this.rcp = new RestCollectorPlugin(new HttpClientParams());
-    }
-
-    @Test
-    @Disabled
-    void test_limited() throws CollectorException {
-        final AtomicInteger i = new AtomicInteger(0);
-        final Stream<String> stream = this.rcp.collect(this.api, new AggregatorReport());
-
-        stream.limit(2000).forEach(s -> {
-            Assertions.assertTrue(s.length() > 0);
-            i.incrementAndGet();
-            log.info(s);
-        });
-
-        log.info("{}", i.intValue());
-        Assertions.assertTrue(i.intValue() > 0);
-    }
-
-    @Test
-    @Disabled
-    void test_all() throws CollectorException {
-        final AtomicLong i = new AtomicLong(0);
-        final Stream<String> stream = this.rcp.collect(this.api, new AggregatorReport());
-
-        stream.forEach(s -> {
-            Assertions.assertTrue(s.length() > 0);
-            if ((i.incrementAndGet() % 1000) == 0) {
-                log.info("COLLECTED: {}", i.get());
-            }
-
-        });
-
-        log.info("TOTAL: {}", i.get());
-        Assertions.assertTrue(i.get() > 0);
-    }
-
-}
@@ -0,0 +1,10 @@
+{"orcid":"0000-0001-6291-9619","title":"A Visible Light Driven Photoelectrochemical Chloramphenicol Aptasensor Based on a Gold Nanoparticle-Functionalized 3D Flower-like MoS<sub>2</sub>/TiO<sub>2</sub> Heterostructure","pids":[{"value":"10.1021/acs.langmuir.1c02956","schema":"doi"},{"value":"2-s2.0-85124885368","schema":"eid"},{"value":"15205827 07437463","schema":"issn"}]}
+{"orcid":"0000-0002-3210-3034","title":"A Visible Light Driven Photoelectrochemical Chloramphenicol Aptasensor Based on a Gold Nanoparticle-Functionalized 3D Flower-like MoS<sub>2</sub>/TiO<sub>2</sub> Heterostructure","pids":[{"value":"10.1021/acs.langmuir.1c02956","schema":"doi"},{"value":"2-s2.0-85124885368","schema":"eid"},{"value":"15205827 07437463","schema":"issn"}]}
+{"orcid":"0000-0001-6291-9619","title":"Study of High-Transverse-Momentum Higgs Boson Production in Association with a Vector Boson in the <math display=\"inline\"><mrow><mi>q</mi><mi>q</mi><mi>b</mi><mi>b</mi></mrow></math> Final State with the ATLAS Detector","pids":[{"value":"2736741","schema":"other-id"},{"value":"10.1103/PhysRevLett.132.131802","schema":"doi"},{"value":"2312.07605","schema":"arxiv"}]}
+{"orcid":"0000-0002-3210-3034","title":"Study of High-Transverse-Momentum Higgs Boson Production in Association with a Vector Boson in the <math display=\"inline\"><mrow><mi>q</mi><mi>q</mi><mi>b</mi><mi>b</mi></mrow></math> Final State with the ATLAS Detector","pids":[{"value":"2736741","schema":"other-id"},{"value":"10.1103/PhysRevLett.132.131802","schema":"doi"},{"value":"2312.07605","schema":"arxiv"}]}
+{"orcid":"0000-0002-9030-7609","title":"Search for supersymmetry in a final state containing two photons and missing transverse momentum in √s = 13 TeV pp collisions at the LHC using the ATLAS detector","pids":[{"value":"10.1140/epjc/s10052-016-4344-x","schema":"doi"},{"value":"2-s2.0-84988710988","schema":"eid"},{"value":"14346052 14346044","schema":"issn"}]}
+{"orcid":"0000-0003-2552-9691","title":"Search for supersymmetry in a final state containing two photons and missing transverse momentum in $\\sqrt{s}$ = 13 TeV $pp$ collisions at the LHC using the ATLAS detector","pids":[{"value":"1473744","schema":"other-id"},{"value":"10.1140/epjc/s10052-016-4344-x","schema":"doi"},{"value":"1606.09150","schema":"arxiv"}]}
+{"orcid":"0000-0003-0305-8980","title":"Search for supersymmetry in a final state containing two photons and missing transverse momentum in √s = 13 TeV pp collisions at the LHC using the ATLAS detector","pids":[{"value":"10.1140/epjc/s10052-016-4344-x","schema":"doi"},{"value":"2-s2.0-84988710988","schema":"eid"}]}
+{"orcid":"0000-0002-9030-7609","title":"Measurement of the energy response of the ATLAS calorimeter to charged pions from $W^{\\pm }\\rightarrow \\tau ^{\\pm }(\\rightarrow \\pi ^{\\pm }\\nu _{\\tau })\\nu _{\\tau }$ events in Run 2 data","pids":[{"value":"1909507","schema":"other-id"},{"value":"10.1140/epjc/s10052-022-10117-2","schema":"doi"},{"value":"2108.09043","schema":"arxiv"}]}
+{"orcid":"0000-0003-2629-4046","title":"Measurement of the energy response of the ATLAS calorimeter to charged pions from $W^{\\pm }\\rightarrow \\tau ^{\\pm }(\\rightarrow \\pi ^{\\pm }\\nu _{\\tau })\\nu _{\\tau }$ events in Run 2 data","pids":[{"value":"1909507","schema":"other-id"},{"value":"10.1140/epjc/s10052-022-10117-2","schema":"doi"},{"value":"2108.09043","schema":"arxiv"}]}
+{"orcid":"0000-0001-8582-8912","title":"Measurement of the energy response of the ATLAS calorimeter to charged pions from $W^{\\pm }\\rightarrow \\tau ^{\\pm }(\\rightarrow \\pi ^{\\pm }\\nu _{\\tau })\\nu _{\\tau }$ events in Run 2 data","pids":[{"value":"1909507","schema":"other-id"},{"value":"10.1140/epjc/s10052-022-10117-2","schema":"doi"},{"value":"2108.09043","schema":"arxiv"}]}
@@ -0,0 +1 @@
+{"doi":"https://doi.org/10.1098/rstl.1684.0023","OpenAlexId":"https://openalex.org/W2124362779"}
@@ -789,10 +789,6 @@
             "value": "2227-9717",
             "type": "electronic"
         },
-        {
-            "value": "VALUE",
-            "type": "PIPPO"
-        },
         {
             "value": "1063-4584",
             "type": "pu"

Binary file not shown.
@@ -2,7 +2,9 @@ package eu.dnetlib.dhp.collection.crossref
 
 import com.fasterxml.jackson.databind.ObjectMapper
 import eu.dnetlib.dhp.aggregation.AbstractVocabularyTest
-import org.junit.jupiter.api.BeforeEach
+import eu.dnetlib.dhp.collection.crossref.Crossref2Oaf.TransformationType
+import org.apache.commons.io.IOUtils
+import org.junit.jupiter.api.{BeforeEach, Test}
 import org.junit.jupiter.api.extension.ExtendWith
 import org.mockito.junit.jupiter.MockitoExtension
 import org.slf4j.{Logger, LoggerFactory}

@@ -18,4 +20,13 @@ class CrossrefMappingTest extends AbstractVocabularyTest {
     super.setUpVocabulary()
   }
 
+  @Test
+  def mappingRecord(): Unit = {
+    val input =
+      IOUtils.toString(getClass.getResourceAsStream("/eu/dnetlib/dhp/collection/crossref/issn_pub.json"), "utf-8")
+
+    println(Crossref2Oaf.convert(input, vocabularies, TransformationType.All))
+
+  }
+
 }
@@ -26,7 +26,7 @@ class MAGMappingTest {
   @Test
   def mappingMagType(): Unit = {
 
-    checkResult[Publication](MagUtility.createResultFromType(null, null), invisible = false, "Other literature type")
+    checkResult[Publication](MagUtility.createResultFromType(null, null), invisible = true, "Other literature type")
     checkResult[Publication](
       MagUtility.createResultFromType(Some("BookChapter"), null),
       invisible = false,
@@ -16,6 +16,7 @@ import org.mockito.junit.jupiter.MockitoExtension

 import java.io.{BufferedReader, InputStream, InputStreamReader}
 import java.util.zip.GZIPInputStream
+import javax.xml.stream.XMLInputFactory
 import scala.collection.JavaConverters._
 import scala.collection.mutable.ListBuffer
 import scala.io.Source
@@ -49,10 +50,8 @@ class BioScholixTest extends AbstractVocabularyTest {

   @Test
   def testEBIData() = {
-    val inputXML = Source
-      .fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml"))
-      .mkString
-    val xml = new XMLEventReader(Source.fromBytes(inputXML.getBytes()))
+    val inputFactory = XMLInputFactory.newInstance
+    val xml = inputFactory.createXMLEventReader(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml"))
     new PMParser(xml).foreach(s => println(mapper.writeValueAsString(s)))
   }

@@ -91,9 +90,10 @@ class BioScholixTest extends AbstractVocabularyTest {

   @Test
   def testParsingPubmedXML(): Unit = {
-    val xml = new XMLEventReader(
-      Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml"))
-    )
+    val inputFactory = XMLInputFactory.newInstance
+
+    val xml = inputFactory.createXMLEventReader(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml"))
+
     val parser = new PMParser(xml)
     parser.foreach(checkPMArticle)
   }
@@ -156,9 +156,9 @@ class BioScholixTest extends AbstractVocabularyTest {
   @Test
   def testPubmedMapping(): Unit = {

-    val xml = new XMLEventReader(
-      Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml"))
-    )
+    val inputFactory = XMLInputFactory.newInstance
+    val xml = inputFactory.createXMLEventReader(getClass.getResourceAsStream("/eu/dnetlib/dhp/sx/graph/bio/pubmed.xml"))
     val parser = new PMParser(xml)
     val results = ListBuffer[Oaf]()
     parser.foreach(x => results += PubMedToOaf.convert(x, vocabularies))
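Note: the three hunks above all make the same change: instead of reading the whole Pubmed resource into a string and wrapping it in scala-xml's pull parser, the tests obtain a StAX event reader directly from the resource stream. A minimal, self-contained sketch of that pattern using only the JDK's javax.xml.stream API (class name and resource path are invented for illustration):

import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLInputFactory;

import java.io.InputStream;

public class StaxReadSketch {
    public static void main(String[] args) throws Exception {
        XMLInputFactory inputFactory = XMLInputFactory.newInstance();
        // pull events lazily from a classpath resource, as the updated tests do
        try (InputStream in = StaxReadSketch.class.getResourceAsStream("/pubmed.xml")) {
            XMLEventReader reader = inputFactory.createXMLEventReader(in);
            while (reader.hasNext()) {
                System.out.println(reader.nextEvent());
            }
        }
    }
}

Besides removing the dependency on the scala.xml.pull package, which newer scala-xml releases no longer provide, this keeps parsing streaming: events are consumed one at a time instead of materializing the whole document in memory first.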
@@ -53,24 +53,10 @@
            <artifactId>dhp-pace-core</artifactId>
            <version>${project.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
        </dependency>

-        <dependency>
-            <groupId>org.scala-lang.modules</groupId>
-            <artifactId>scala-java8-compat_${scala.binary.version}</artifactId>
-            <version>1.0.2</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang.modules</groupId>
-            <artifactId>scala-collection-compat_${scala.binary.version}</artifactId>
-            <version>2.11.0</version>
-        </dependency>
-
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_${scala.binary.version}</artifactId>
@@ -79,16 +65,10 @@
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_${scala.binary.version}</artifactId>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-graphx_${scala.binary.version}</artifactId>
        </dependency>

-        <dependency>
-            <groupId>com.arakelian</groupId>
-            <artifactId>java-jq</artifactId>
-        </dependency>
        <dependency>
            <groupId>dom4j</groupId>
            <artifactId>dom4j</artifactId>
@@ -101,10 +81,6 @@
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-core</artifactId>
-        </dependency>
        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
@@ -42,6 +42,7 @@ import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpException;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
 import eu.dnetlib.pace.config.DedupConfig;
+import eu.dnetlib.pace.util.SparkCompatUtils;
 import scala.Tuple3;
 import scala.collection.JavaConversions;

@@ -148,8 +149,7 @@ public class SparkCreateMergeRels extends AbstractSparkAction {
        Dataset<Row> pivotHistory = spark
            .createDataset(
                Collections.emptyList(),
-                RowEncoder
-                    .apply(StructType.fromDDL("id STRING, lastUsage STRING")));
+                SparkCompatUtils.encoderFor(StructType.fromDDL("id STRING, lastUsage STRING")));

        if (StringUtils.isNotBlank(pivotHistoryDatabase)) {
            pivotHistory = spark
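Note: RowEncoder.apply(StructType) lives in Spark's internal org.apache.spark.sql.catalyst.encoders package, and its public replacement differs between Spark release lines, so the hunk above routes encoder creation through a single compatibility helper. The following is only a sketch of what such a helper can look like, assuming a Spark release where RowEncoder.apply still exists; the actual SparkCompatUtils shipped with dhp-pace-core may be implemented differently:

import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.catalyst.encoders.RowEncoder;
import org.apache.spark.sql.types.StructType;

public final class EncoderCompatSketch {

    private EncoderCompatSketch() {
    }

    // single switch point: a Spark upgrade swaps this one call
    // (e.g. for Encoders.row(schema), the public equivalent added in Spark 3.5)
    public static Encoder<Row> encoderFor(StructType schema) {
        return RowEncoder.apply(schema);
    }
}

Centralizing the call means that moving to a newer Spark touches one line instead of every job that needs a Dataset<Row> with an explicit schema.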
@@ -8,7 +8,6 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.api.java.function.ReduceFunction;
 import org.apache.spark.sql.*;
-import org.apache.spark.sql.catalyst.encoders.RowEncoder;
 import org.apache.spark.sql.types.StructType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -23,6 +22,7 @@ import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.utils.MergeUtils;
 import eu.dnetlib.dhp.utils.ISLookupClientFactory;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
+import eu.dnetlib.pace.util.SparkCompatUtils;
 import scala.Tuple2;
 import scala.Tuple3;

@@ -145,7 +145,7 @@ public class SparkPropagateRelation extends AbstractSparkAction {
        StructType idsSchema = StructType
            .fromDDL("`id` STRING, `dataInfo` STRUCT<`deletedbyinference`:BOOLEAN,`invisible`:BOOLEAN>");

-        Dataset<Row> allIds = spark.emptyDataset(RowEncoder.apply(idsSchema));
+        Dataset<Row> allIds = spark.emptyDataset(SparkCompatUtils.encoderFor(idsSchema));

        for (EntityType entityType : ModelSupport.entityTypes.keySet()) {
            String entityPath = graphBasePath + '/' + entityType.name();
File diff suppressed because one or more lines are too long
@@ -190,7 +190,7 @@ public class SparkDedupTest implements Serializable {
        System.out.println("orp_simrel = " + orp_simrel);

        if (CHECK_CARDINALITIES) {
-            assertEquals(751, orgs_simrel);
+            assertEquals(742, orgs_simrel);
            assertEquals(566, pubs_simrel);
            assertEquals(113, sw_simrel);
            assertEquals(148, ds_simrel);
@@ -251,7 +251,7 @@ public class SparkDedupTest implements Serializable {

        // entities simrels supposed to be equal to the number of previous step (no rels in whitelist)
        if (CHECK_CARDINALITIES) {
-            assertEquals(751, orgs_simrel);
+            assertEquals(742, orgs_simrel);
            assertEquals(566, pubs_simrel);
            assertEquals(148, ds_simrel);
            assertEquals(280, orp_simrel);
@@ -442,7 +442,7 @@ public class SparkDedupTest implements Serializable {
        final List<Relation> merges = pubs
            .filter("source == '50|arXiv_dedup_::c93aeb433eb90ed7a86e29be00791b7c'")
            .collectAsList();
-        assertEquals(3, merges.size());
+        assertEquals(1, merges.size());
        Set<String> dups = Sets
            .newHashSet(
                "50|doi_________::3b1d0d8e8f930826665df9d6b82fbb73",
@@ -451,7 +451,7 @@ public class SparkDedupTest implements Serializable {
        merges.forEach(r -> {
            assertEquals(ModelConstants.RESULT_RESULT, r.getRelType());
            assertEquals(ModelConstants.DEDUP, r.getSubRelType());
-            assertEquals(ModelConstants.MERGES, r.getRelClass());
+            assertEquals(ModelConstants.IS_MERGED_IN, r.getRelClass());
            assertTrue(dups.contains(r.getTarget()));
        });

@@ -561,7 +561,7 @@ public class SparkDedupTest implements Serializable {
        System.out.println("orp_mergerel = " + orp_mergerel);

        if (CHECK_CARDINALITIES) {
-            assertEquals(1268, orgs_mergerel);
+            assertEquals(1278, orgs_mergerel);
            assertEquals(1156, pubs.count());
            assertEquals(292, sw_mergerel);
            assertEquals(476, ds_mergerel);
@@ -618,7 +618,7 @@ public class SparkDedupTest implements Serializable {
        System.out.println("orp_deduprecord = " + orp_deduprecord);

        if (CHECK_CARDINALITIES) {
-            assertEquals(86, orgs_deduprecord);
+            assertEquals(78, orgs_deduprecord);
            assertEquals(96, pubs.count());
            assertEquals(47, sw_deduprecord);
            assertEquals(97, ds_deduprecord);
@@ -761,7 +761,7 @@ public class SparkDedupTest implements Serializable {

        if (CHECK_CARDINALITIES) {
            assertEquals(930, publications);
-            assertEquals(839, organizations);
+            assertEquals(831, organizations);
            assertEquals(100, projects);
            assertEquals(100, datasource);
            assertEquals(196, softwares);
@@ -22,8 +22,11 @@ import java.util.Properties;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SparkSession;
 import org.junit.jupiter.api.*;
 import org.junit.jupiter.api.extension.ExtendWith;
@@ -143,7 +146,7 @@ public class SparkOpenorgsDedupTest implements Serializable {
            .load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "organization"))
            .count();

-        assertEquals(86, orgs_simrel);
+        assertEquals(92, orgs_simrel);
    }

    @Test
@@ -172,7 +175,7 @@ public class SparkOpenorgsDedupTest implements Serializable {
            .load(DedupUtility.createSimRelPath(testOutputBasePath, testActionSetId, "organization"))
            .count();

-        assertEquals(122, orgs_simrel);
+        assertEquals(128, orgs_simrel);
    }

    @Test
@@ -207,7 +210,7 @@ public class SparkOpenorgsDedupTest implements Serializable {
            .read()
            .load(testOutputBasePath + "/" + testActionSetId + "/organization_mergerel")
            .count();
-        assertEquals(132, orgs_mergerel);
+        assertEquals(128, orgs_mergerel);

        // verify that a DiffRel is in the mergerels (to be sure that the job supposed to remove them has something to
        // do)
@@ -143,7 +143,9 @@ public class SparkPublicationRootsTest2 implements Serializable {
                    "--graphBasePath", graphInputPath,
                    "--actionSetId", testActionSetId,
                    "--isLookUpUrl", "lookupurl",
-                    "--workingPath", workingPath
+                    "--workingPath", workingPath,
+                    "--hiveMetastoreUris", "none",
+                    "--pivotHistoryDatabase", "none"
                }), spark)
            .run(isLookUpService);

@@ -9,6 +9,7 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 import org.junit.platform.commons.util.StringUtils;

+import eu.dnetlib.dhp.oa.dedup.SparkOpenorgsDedupTest;
 import eu.dnetlib.pace.config.DedupConfig;
 import eu.dnetlib.pace.model.SparkModel;

@@ -24,6 +25,31 @@ class JsonPathTest {

        Row row = SparkModel.apply(conf).rowFromJson(org);

+        System.out.println("row = " + row);
+        Assertions.assertNotNull(row);
+        Assertions.assertTrue(StringUtils.isNotBlank(row.getAs("identifier")));
+
+        System.out.println("row = " + row.getAs("countrytitle"));
+    }
+
+    @Test
+    void jsonToModelTest() throws IOException {
+        DedupConfig conf = DedupConfig
+            .load(
+                IOUtils
+                    .toString(
+                        SparkOpenorgsDedupTest.class
+                            .getResourceAsStream(
+                                "/eu/dnetlib/dhp/dedup/conf/org.curr.conf.json")));
+
+        final String org = IOUtils.toString(getClass().getResourceAsStream("organization_example1.json"));
+
+        Row row = SparkModel.apply(conf).rowFromJson(org);
+        // to check that the same parsing returns the same row
+        Row row1 = SparkModel.apply(conf).rowFromJson(org);
+
+        Assertions.assertEquals(row, row1);
+        System.out.println("row = " + row);
        Assertions.assertNotNull(row);
        Assertions.assertTrue(StringUtils.isNotBlank(row.getAs("identifier")));
    }
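Note: the new jsonToModelTest parses the same record twice and asserts that the two rows are equal, i.e. that rowFromJson is deterministic. Each column of the resulting Row is produced by evaluating a JSONPath expression from the dedup configuration against the input JSON. As a standalone illustration of that extraction step, here is a sketch using the Jayway json-path library directly (the sample document is invented; the expressions mirror the model section of the configuration below):

import com.jayway.jsonpath.JsonPath;

import java.util.List;

public class JsonPathSketch {
    public static void main(String[] args) {
        String org = "{\"id\":\"20|test::1\","
            + "\"legalname\":{\"value\":\"Test University\"},"
            + "\"pid\":[{\"qualifier\":{\"classid\":\"ROR\"},\"value\":\"https://ror.org/000000\"}]}";

        // simple property access
        String legalname = JsonPath.read(org, "$.legalname.value");
        // filter expressions return the list of matching values
        List<String> rorid = JsonPath.read(org, "$.pid[?(@.qualifier.classid=='ROR')].value");

        System.out.println(legalname + " / " + rorid);
    }
}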
@@ -4,8 +4,8 @@
    "dedupRun" : "001",
    "entityType" : "organization",
    "subEntityValue": "organization",
-    "orderField" : "legalname",
-    "queueMaxSize" : "2000",
+    "orderField" : "original_legalname",
+    "queueMaxSize" : "100000",
    "groupMaxSize" : "50",
    "slidingWindowSize" : "200",
    "idPath":"$.id",
@@ -15,10 +15,10 @@
  },
  "pace" : {
    "clustering" : [
-      { "name" : "sortedngrampairs", "fields" : [ "legalname" ], "params" : { "max" : 2, "ngramLen" : "3"} },
-      { "name" : "suffixprefix", "fields" : [ "legalname" ], "params" : { "max" : 1, "len" : "3" } },
+      { "name" : "sortedngrampairs", "fields" : [ "original_legalname" ], "params" : { "max" : 2, "ngramLen" : "3"} },
+      { "name" : "suffixprefix", "fields" : [ "original_legalname" ], "params" : { "max" : 1, "len" : "3" } },
      { "name" : "urlclustering", "fields" : [ "websiteurl" ], "params" : { } },
-      { "name" : "keywordsclustering", "fields" : [ "legalname" ], "params" : { "max": 2, "windowSize": 4} }
+      { "name" : "legalnameclustering", "fields" : [ "legalname" ], "params" : { "max": 2} }
    ],
    "decisionTree" : {
      "start": {
@@ -29,16 +29,23 @@
            "weight": 1,
            "countIfUndefined": "false",
            "params": {}
+          },
+          {
+            "field": "rorid",
+            "comparator": "exactMatch",
+            "weight": 1,
+            "countIfUndefined": "false",
+            "params": {}
          }
        ],
        "threshold": 1,
-        "aggregation": "AVG",
+        "aggregation": "OR",
        "positive": "MATCH",
        "negative": "NO_MATCH",
-        "undefined": "layer2",
+        "undefined": "necessaryConditions",
        "ignoreUndefined": "false"
      },
-      "layer2": {
+      "necessaryConditions": {
        "fields": [
          {
            "field": "websiteurl",
@@ -55,14 +62,14 @@
            "params": {}
          },
          {
-            "field": "legalname",
+            "field": "original_legalname",
            "comparator": "numbersMatch",
            "weight": 1,
            "countIfUndefined": "true",
            "params": {}
          },
          {
-            "field": "legalname",
+            "field": "original_legalname",
            "comparator": "romansMatch",
            "weight": 1,
            "countIfUndefined": "true",
@@ -71,68 +78,64 @@
        ],
        "threshold": 1,
        "aggregation": "AND",
-        "positive": "layer3",
+        "positive": "cityCheck",
        "negative": "NO_MATCH",
-        "undefined": "layer3",
+        "undefined": "cityCheck",
        "ignoreUndefined": "true"
      },
-      "layer3": {
+      "cityCheck": {
        "fields": [
          {
            "field": "legalname",
-            "comparator": "cityMatch",
+            "comparator": "codeMatch",
            "weight": 1.0,
            "countIfUndefined": "true",
            "params": {
-              "windowSize": "4"
+              "codeRegex": "city::\\d+"
            }
          }
        ],
        "threshold": 0.1,
        "aggregation": "AVG",
-        "positive": "layer4",
+        "positive": "keywordCheck",
        "negative": "NO_MATCH",
        "undefined": "NO_MATCH",
        "ignoreUndefined": "true"
      },
-      "layer4": {
+      "keywordCheck": {
        "fields": [
          {
            "field": "legalname",
-            "comparator": "keywordMatch",
+            "comparator": "codeMatch",
            "weight": 1.0,
            "countIfUndefined": "true",
            "params": {
-              "windowSize": "4"
+              "codeRegex": "key::\\d+"
            }
          }
        ],
        "threshold": 0.7,
        "aggregation": "AVG",
-        "positive": "layer5",
+        "positive": "nameCheck",
        "negative": "NO_MATCH",
-        "undefined": "layer5",
+        "undefined": "nameCheck",
        "ignoreUndefined": "true"
      },
-      "layer5": {
+      "nameCheck": {
        "fields": [
          {
            "field": "legalname",
-            "comparator": "jaroWinklerNormalizedName",
+            "comparator": "jaroWinklerLegalname",
            "weight": 0.9,
            "countIfUndefined": "true",
-            "params": {
-              "windowSize": "4"
-            }
+            "params": {}
          },
          {
            "field": "legalshortname",
-            "comparator": "jaroWinklerNormalizedName",
+            "comparator": "jaroWinklerLegalname",
            "weight": 0.1,
            "countIfUndefined": "false",
-            "params": {
-              "windowSize": 4
-            }
+            "params": {}
          }
        ],
        "threshold": 0.9,
@@ -144,126 +147,16 @@
      }
    },
    "model" : [
-      { "name" : "country", "type" : "String", "path" : "$.country.classid"},
-      { "name" : "legalshortname", "type" : "String", "path" : "$.legalshortname.value"},
-      { "name" : "legalname", "type" : "String", "path" : "$.legalname.value" },
+      { "name" : "country", "type" : "String", "path" : "$.country.classid", "infer" : "country", "inferenceFrom" : "$.legalname.value"},
+      { "name" : "legalshortname", "type" : "String", "path" : "$.legalshortname.value", "infer" : "city_keyword"},
+      { "name" : "original_legalname", "type" : "String", "path" : "$.legalname.value" },
+      { "name" : "legalname", "type" : "String", "path" : "$.legalname.value", "infer" : "city_keyword"},
      { "name" : "websiteurl", "type" : "URL", "path" : "$.websiteurl.value" },
      { "name" : "gridid", "type" : "String", "path" : "$.pid[?(@.qualifier.classid =='grid')].value"},
+      { "name" : "rorid", "type" : "String", "path" : "$.pid[?(@.qualifier.classid =='ROR')].value"},
      { "name" : "originalId", "type" : "String", "path" : "$.id" }
    ],
-    "blacklists" : {
-      "legalname" : []
-    },
-    "synonyms": {
-      "key::1": ["university","università", "universitas", "università studi","universitario","universitaria","université", "universite", "universitaire","universitaires","universidad","universitade","Universität","universitaet","Uniwersytet","университет","universiteit","πανεπιστήμιο","universitesi","universiteti", "universiti"],
-      "key::2": ["studies","studi","études","estudios","estudos","Studien","studia","исследования","studies","σπουδές"],
-      "key::3": ["advanced","superiore","supérieur","supérieure","supérieurs","supérieures","avancado","avancados","fortgeschrittene","fortgeschritten","zaawansowany","передовой","gevorderd","gevorderde","προχωρημένος","προχωρημένη","προχωρημένο","προχωρημένες","προχωρημένα","wyzsza"],
-      "key::4": ["institute","istituto","institut","instituto","instituto","Institut","instytut","институт","instituut","ινστιτούτο"],
-      "key::5": ["hospital","ospedale","hôpital","hospital","hospital","Krankenhaus","szpital","больница","ziekenhuis","νοσοκομείο"],
-      "key::6": ["research","ricerca","recherche","investigacion","pesquisa","Forschung","badania","исследования","onderzoek","έρευνα","erevna","erevnas"],
-      "key::7": ["college","collegio","colegio","faculdade","Hochschule","Szkoła Wyższa","Высшая школа","κολλέγιο"],
-      "key::8": ["foundation","fondazione","fondation","fundación","fundação","Stiftung","Fundacja","фонд","stichting","ίδρυμα","idryma"],
-      "key::9": ["center","centro","centre","centro","centro","zentrum","centrum","центр","centrum","κέντρο"],
-      "key::10": ["national","nazionale","national","nationale","nationaux","nationales","nacional","nacional","national","krajowy","национальный","nationaal","nationale","εθνικό"],
-      "key::11": ["association","associazione","association","asociación","associação","Verein","verband","stowarzyszenie","ассоциация","associatie"],
-      "key::12": ["society","societa","société","sociedad","sociedade","gesellschaft","społeczeństwo","общество","maatschappij","κοινωνία"],
-      "key::13": ["international","internazionale","international","internacional","internacional","international","międzynarodowy","Международный","internationaal","internationale","διεθνής","διεθνή","διεθνές"],
-      "key::14": ["community","comunita","communauté","comunidad","comunidade","Gemeinschaft","społeczność","сообщество","gemeenschap","κοινότητα"],
-      "key::15": ["school","scuola","école","escuela","escola","schule","Szkoła","школа","school","σχολείο"],
-      "key::16": ["education","educazione","éducation","educacion","Educação","Bildung","Edukacja","образование","opleiding","εκπαίδευση"],
-      "key::17": ["academy","accademia","académie","academia","academia","Akademie","akademie","академия","academie","ακαδημία"],
-      "key::18": ["public","pubblico","public","publique","publics","publiques","publico","publico","Öffentlichkeit","publiczny","публичный","publiek","publieke","δημόσιος","δημόσια","δημόσιο"],
-      "key::19": ["museum","museo","musée","mueso","museu","museum","muzeum","музей","museum","μουσείο"],
-      "key::20": ["group","gruppo","groupe","grupo","grupo","gruppe","grupa","группа","groep","ομάδα","όμιλος"],
-      "key::21": ["department","dipartimento","département","departamento","departamento","abteilung","departament","отдел","afdeling","τμήμα"],
-      "key::22": ["council","consiglio","conseil","Consejo","conselho","gesellschaft","rada","совет","raad","συμβούλιο"],
-      "key::23": ["library","biblioteca","bibliothèque","biblioteca","biblioteca","Bibliothek","biblioteka","библиотека","bibliotheek","βιβλιοθήκη"],
-      "key::24": ["ministry","ministero","ministère","ministerio","ministério","Ministerium","ministerstwo","министерство","ministerie","υπουργείο"],
-      "key::25": ["services","servizi","services","servicios","Serviços","Dienstleistungen","usługi","услуги","diensten","υπηρεσίες"],
-      "key::26": ["central","centrale","central","centrale","centrales","central","central","zentral","centralny","цетральный","centraal","κεντρικός","κεντρική","κεντρικό","κεντρικά"],
-      "key::27": ["general","generale","général","générale","généraux","générales","general","geral","general","Allgemeines","general","общий","algemeen","algemene","γενικός","γενική","γενικό","γενικά"],
-      "key::28": ["applied","applicati","appliqué","appliquée","appliqués","appliquées","aplicado","aplicada","angewendet","stosowany","прикладной","toegepast","toegepaste","εφαρμοσμένος","εφαρμοσμένη","εφαρμοσμένο","εφαρμοσμένα"],
-      "key::29": ["european","europee","europea","européen","européenne","européens","européennes","europeo","europeu","europäisch","europejski","европейский","Europees","Europese","ευρωπαϊκός","ευρωπαϊκή","ευρωπαϊκό","ευρωπαϊκά"],
-      "key::30": ["agency","agenzia","agence","agencia","agencia","agentur","agencja","агенция","agentschap","πρακτορείο"],
-      "key::31": ["laboratory","laboratorio","laboratoire","laboratorio","laboratorio","labor","laboratorium","лаборатория","laboratorium","εργαστήριο"],
-      "key::32": ["industry","industria","industrie","индустрия","industrie","βιομηχανία"],
-      "key::33": ["industrial","industriale","industriel","industrielle","industriels","industrielles","индустриальный","industrieel","βιομηχανικός","βιομηχανική","βιομηχανικό","βιομηχανικά","βιομηχανικές"],
-      "key::34": ["consortium","consorzio","consortium","консорциум","consortium","κοινοπραξία"],
-      "key::35": ["organization","organizzazione","organisation","organización","organização","organizacja","организация","organisatie","οργανισμός"],
-      "key::36": ["authority","autorità","autorité","авторитет","autoriteit"],
-      "key::37": ["federation","federazione","fédération","федерация","federatie","ομοσπονδία"],
-      "key::38": ["observatory","osservatorio","observatoire","обсерватория","observatorium","αστεροσκοπείο"],
-      "key::39": ["bureau","ufficio","bureau","офис","bureau","γραφείο"],
-      "key::40": ["company","impresa","compagnie","société","компания","bedrijf","εταιρία"],
-      "key::41": ["polytechnic","politecnico","polytechnique","политехника","polytechnisch","πολυτεχνείο","universita politecnica","polytechnic university","universidad politecnica","universitat politecnica","politechnika","politechniki","university technology","university science technology"],
-      "key::42": ["coalition","coalizione","coalition","коалиция","coalitie","συνασπισμός"],
-      "key::43": ["initiative","iniziativa","initiative","инициатива","initiatief","πρωτοβουλία"],
-      "key::44": ["academic","accademico","académique","universitaire","акадеческий academisch","ακαδημαϊκός","ακαδημαϊκή","ακαδημαϊκό","ακαδημαϊκές","ακαδημαϊκοί"],
-      "key::45": ["institution","istituzione","institution","институциональный","instelling","ινστιτούτο"],
-      "key::46": ["division","divisione","division","отделение","divisie","τμήμα"],
-      "key::47": ["committee","comitato","comité","комитет","commissie","επιτροπή"],
-      "key::48": ["promotion","promozione","продвижение","proothisis","forderung"],
-      "key::49": ["medical","medicine","clinical","medicina","clinici","médico","medicina","clínica","médico","medicina","clínica","medizinisch","Medizin","klinisch","medisch","geneeskunde","klinisch","ιατρικός","ιατρική","ιατρικό","ιατρικά","κλινικός","κλινική","κλινικό","κλινικά","tıbbi","tıp","klinik","orvosi","orvostudomány","klinikai","zdravniški","medicinski","klinični","meditsiini","kliinik","kliiniline"],
-      "key::50": ["technology","technological","tecnologia","tecnologie","tecnología","tecnológico","tecnologia","tecnológico","Technologie","technologisch","technologie","technologisch","τεχνολογία","τεχνολογικός","τεχνολογική","τεχνολογικό","teknoloji","teknolojik","technológia","technológiai","tehnologija","tehnološki","tehnoloogia","tehnoloogiline","technologii","technical","texniki","teknik"],
-      "key::51": ["science","scientific","scienza","scientifiche","scienze","ciencia","científico","ciência","científico","Wissenschaft","wissenschaftlich","wetenschap","wetenschappelijk","επιστήμη","επιστημονικός","επιστημονική","επιστημονικό","επιστημονικά","bilim","bilimsel","tudomány","tudományos","znanost","znanstveni","teadus","teaduslik",""],
-      "key::52": ["engineering","ingegneria","ingeniería","engenharia","Ingenieurwissenschaft","ingenieurswetenschappen","bouwkunde","μηχανικός","μηχανική","μηχανικό","mühendislik","mérnöki","Inženirstvo","inseneeria","inseneri",""],
-      "key::53": ["management","gestione","gestionale","gestionali","gestión","administración","gestão","administração","Verwaltung","management","διαχείριση","yönetim","menedzsment","vodstvo","upravljanje","management","juhtkond","juhtimine","haldus",""],
-      "key::54": ["energy","energia","energía","energia","Energie","energie","ενέργεια","enerji","energia","energija","energia",""],
-      "key::55": ["agricultural","agriculture","agricoltura","agricole","agrícola","agricultura","agrícola","agricultura","landwirtschaftlich","Landwirtschaft","landbouwkundig","landbouw","αγροτικός","αγροτική","αγροτικό","γεωργικός","γεωργική","γεωργικό","γεωργία","tarımsal","tarım","mezőgazdasági","mezőgazdaság","poljedelski","poljedelstvo","põllumajandus","põllumajanduslik",""],
-      "key::56": ["information","informazione","información","informação","Information","informatie","πληροφορία","bilgi","información","informacija","informatsioon","informatycznych",""],
-      "key::57": ["social","sociali","social","social","Sozial","sociaal","maatschappelijk","κοινωνικός","κοινωνική","κοινωνικό","κοινωνικά","sosyal","szociális","družbeni","sotsiaal","sotsiaalne",""],
-      "key::58": ["environmental","ambiente","medioambiental","ambiente","medioambiente","meioambiente","Umwelt","milieu","milieuwetenschap","milieukunde","περιβαλλοντικός","περιβαλλοντική","περιβαλλοντικό","περιβαλλοντικά","çevre","környezeti","okoliški","keskonna",""],
-      "key::59": ["business","economia","economiche","economica","negocio","empresa","negócio","Unternehmen","bedrijf","bedrijfskunde","επιχείρηση","iş","üzleti","posel","ettevõte/äri",""],
-      "key::60": ["pharmaceuticals","pharmacy","farmacia","farmaceutica","farmacéutica","farmacia","farmacêutica","farmácia","Pharmazeutika","Arzneimittelkunde","farmaceutica","geneesmiddelen","apotheek","φαρμακευτικός","φαρμακευτική","φαρμακευτικό","φαρμακευτικά","φαρμακείο","ilaç","eczane","gyógyszerészeti","gyógyszertár","farmacevtika","lekarništvo","farmaatsia","farmatseutiline",""],
-      "key::61": ["healthcare","health services","salute","atenciónmédica","cuidadodelasalud","cuidadoscomasaúde","Gesundheitswesen","gezondheidszorg","ιατροφαρμακευτικήπερίθαλψη","sağlıkhizmeti","egészségügy","zdravstvo","tervishoid","tervishoiu",""],
-      "key::62": ["history","storia","historia","história","Geschichte","geschiedenis","geschiedkunde","ιστορία","tarih","történelem","zgodovina","ajalugu",""],
-      "key::63": ["materials","materiali","materia","materiales","materiais","materialen","υλικά","τεκμήρια","malzemeler","anyagok","materiali","materjalid","vahendid",""],
-      "key::64": ["economics","economia","economiche","economica","economía","economia","Wirtschaft","economie","οικονομικά","οικονομικέςεπιστήμες","ekonomi","közgazdaságtan","gospodarstvo","ekonomija","majanduslik","majandus",""],
-      "key::65": ["therapeutics","terapeutica","terapéutica","terapêutica","therapie","θεραπευτική","tedavibilimi","gyógykezelés","terapevtika","terapeutiline","ravi",""],
-      "key::66": ["oncology","oncologia","oncologico","oncología","oncologia","Onkologie","oncologie","ογκολογία","onkoloji","onkológia","onkologija","onkoloogia",""],
-      "key::67": ["natural","naturali","naturale","natural","natural","natürlich","natuurlijk","φυσικός","φυσική","φυσικό","φυσικά","doğal","természetes","naraven","loodus",""],
-      "key::68": ["educational","educazione","pedagogia","educacional","educativo","educacional","pädagogisch","educatief","εκπαιδευτικός","εκπαιδευτική","εκπαιδευτικό","εκπαιδευτικά","eğitimsel","oktatási","izobraževalen","haridus","hariduslik",""],
-      "key::69": ["biomedical","biomedica","biomédico","biomédico","biomedizinisch","biomedisch","βιοιατρικός","βιοιατρική","βιοιατρικό","βιοιατρικά","biyomedikal","orvosbiológiai","biomedicinski","biomeditsiiniline",""],
-      "key::70": ["veterinary","veterinaria","veterinarie","veterinaria","veterinária","tierärtzlich","veterinair","veeartsenijlkunde","κτηνιατρικός","κτηνιατρική","κτηνιατρικό","κτηνιατρικά","veteriner","állatorvosi","veterinar","veterinarski","veterinaaria",""],
-      "key::71": ["chemistry","chimica","química","química","Chemie","chemie","scheikunde","χημεία","kimya","kémia","kemija","keemia",""],
-      "key::72": ["security","sicurezza","seguridad","segurança","Sicherheit","veiligheid","ασφάλεια","güvenlik","biztonsági","varnost","turvalisus","julgeolek",""],
-      "key::73": ["biotechnology","biotecnologia","biotecnologie","biotecnología","biotecnologia","Biotechnologie","biotechnologie","βιοτεχνολογία","biyoteknoloji","biotechnológia","biotehnologija","biotehnoloogia",""],
-      "key::74": ["military","militare","militari","militar","militar","Militär","militair","leger","στρατιωτικός","στρατιωτική","στρατιωτικό","στρατιωτικά","askeri","katonai","vojaški","vojni","militaar","wojskowa",""],
-      "key::75": ["theological","teologia","teologico","teológico","tecnológica","theologisch","theologisch","θεολογικός","θεολογική","θεολογικό","θεολογικά","teolojik","technológiai","teološki","teoloogia","usuteadus","teoloogiline",""],
-      "key::76": ["electronics","elettronica","electrónica","eletrônicos","Elektronik","elektronica","ηλεκτρονική","elektronik","elektronika","elektronika","elektroonika",""],
-      "key::77": ["forestry","forestale","forestali","silvicultura","forestal","floresta","Forstwirtschaft","bosbouw","δασοκομία","δασολογία","ormancılık","erdészet","gozdarstvo","metsandus",""],
-      "key::78": ["maritime","marittima","marittime","marittimo","marítimo","marítimo","maritiem","ναυτικός","ναυτική","ναυτικό","ναυτικά","ναυτιλιακός","ναυτιλιακή","ναυτιλιακό","ναυτιλιακά","θαλάσσιος","θαλάσσια","θαλάσσιο","denizcilik","tengeri","morski","mere","merendus",""],
-      "key::79": ["sports","sport","deportes","esportes","Sport","sport","sportwetenschappen","άθληση","γυμναστικήδραστηριότητα","spor","sport","šport","sport","spordi",""],
-      "key::80": ["surgery","chirurgia","chirurgiche","cirugía","cirurgia","Chirurgie","chirurgie","heelkunde","εγχείρηση","επέμβαση","χειρουργικήεπέμβαση","cerrahi","sebészet","kirurgija","kirurgia",""],
-      "key::81": ["cultural","culturale","culturali","cultura","cultural","cultural","kulturell","cultureel","πολιτιστικός","πολιτιστική","πολιτιστικό","πολιτισμικός","πολιτισμική","πολιτισμικό","kültürel","kultúrális","kulturni","kultuuri","kultuuriline",""],
-      "key::82": ["computerscience","informatica","ordenador","computadora","informática","computación","cienciasdelacomputación","ciênciadacomputação","Computer","computer","υπολογιστής","ηλεκτρονικόςυπολογιστής","bilgisayar","számítógép","računalnik","arvuti",""],
-      "key::83": ["finance","financial","finanza","finanziarie","finanza","financiero","finanças","financeiro","Finanzen","finanziell","financiën","financieel","χρηματοοικονομικά","χρηματοδότηση","finanse","finansal","pénzügy","pénzügyi","finance","finančni","finants","finantsiline",""],
-      "key::84": ["communication","comunicazione","comuniciación","comunicação","Kommunikation","communication","επικοινωνία","iletişim","kommunikáció","komuniciranje","kommunikatsioon",""],
-      "key::85": ["justice","giustizia","justicia","justiça","Recht","Justiz","justitie","gerechtigheid","δικαιοσύνη","υπουργείοδικαιοσύνης","δίκαιο","adalet","igazságügy","pravo","õigus",""],
-      "key::86": ["aerospace","aerospaziale","aerospaziali","aeroespacio","aeroespaço","Luftfahrt","luchtvaart","ruimtevaart","αεροπορικός","αεροπορική","αεροπορικό","αεροναυπηγικός","αεροναυπηγική","αεροναυπηγικό","αεροναυπηγικά","havacılıkveuzay","légtér","zrakoplovstvo","atmosfäär","kosmos",""],
-      "key::87": ["dermatology","dermatologia","dermatología","dermatologia","Dermatologie","dermatologie","δρματολογία","dermatoloji","bőrgyógyászat","dermatológia","dermatologija","dermatoloogia",""],
-      "key::88": ["architecture","architettura","arquitectura","arquitetura","Architektur","architectuur","αρχιτεκτονική","mimarlık","építészet","arhitektura","arhitektuur",""],
-      "key::89": ["mathematics","matematica","matematiche","matemáticas","matemáticas","Mathematik","wiskunde","mathematica","μαθηματικά","matematik","matematika","matematika","matemaatika",""],
-      "key::90": ["language","lingue","linguistica","linguistiche","lenguaje","idioma","língua","idioma","Sprache","taal","taalkunde","γλώσσα","dil","nyelv","jezik","keel",""],
-      "key::91": ["neuroscience","neuroscienza","neurociencia","neurociência","Neurowissenschaft","neurowetenschappen","νευροεπιστήμη","nörobilim","idegtudomány","nevroznanost","neuroteadused",""],
-      "key::92": ["automation","automazione","automatización","automação","Automatisierung","automatisering","αυτοματοποίηση","otomasyon","automatizálás","avtomatizacija","automatiseeritud",""],
-      "key::93": ["pediatric","pediatria","pediatriche","pediatrico","pediátrico","pediatría","pediátrico","pediatria","pädiatrisch","pediatrische","παιδιατρική","pediatrik","gyermekgyógyászat","pediatrija","pediaatria",""],
-      "key::94": ["photonics","fotonica","fotoniche","fotónica","fotônica","Photonik","fotonica","φωτονική","fotonik","fotonika","fotonika","fotoonika",""],
-      "key::95": ["mechanics", "mechanical", "meccanica","meccaniche","mecánica","mecânica","Mechanik","Maschinenbau","mechanica","werktuigkunde","μηχανικής","mekanik","gépészet","mehanika","mehaanika",""],
-      "key::96": ["psychiatrics","psichiatria","psichiatrica","psichiatriche","psiquiatría","psiquiatria","Psychiatrie","psychiatrie","ψυχιατρική","psikiyatrik","pszihiátria","psihiatrija","psühhaatria",""],
-      "key::97": ["psychology","fisiologia","psicología","psicologia","Psychologie","psychologie","ψυχολογία","psikoloji","pszihológia","psihologija","psühholoogia",""],
-      "key::98": ["automotive","industriaautomobilistica","industriadelautomóvil","automotriz","industriaautomotriz","automotivo","Automobilindustrie","autoindustrie","αυτοκίνητος","αυτοκίνητη","αυτοκίνητο","αυτοκινούμενος","αυτοκινούμενη","αυτοκινούμενο","αυτοκινητιστικός","αυτοκινητιστική","αυτοκινητιστικό","otomotiv","autóipari","samogiben","avtomobilskaindustrija","auto-",""],
-      "key::99": ["neurology","neurologia","neurologiche","neurología","neurologia","Neurologie","neurologie","zenuwleer","νευρολογία","nöroloji","neurológia","ideggyógyászat","nevrologija","neuroloogia",""],
-      "key::100": ["geology","geologia","geologiche","geología","geologia","Geologie","geologie","aardkunde","γεωλογία","jeoloji","geológia","földtudomány","geologija","geoloogia",""],
-      "key::101": ["microbiology","microbiologia","micro-biologia","microbiologiche","microbiología","microbiologia","Mikrobiologie","microbiologie","μικροβιολογία","mikrobiyoloji","mikrobiológia","mikrobiologija","mikrobioloogia",""],
-      "key::102": ["informatics","informatica","informática","informática","informatica",""],
-      "key::103": ["forschungsgemeinschaft","comunita ricerca","research community","research foundation","research association"],
-      "key::104": ["commerce","ticaret","ticarət","commercio","trade","handel","comercio"],
-      "key::105" : ["state", "stato", "etade", "estado", "statale", "etat", "zustand", "estado"],
-      "key::106" : ["seminary", "seminario", "seminaire", "seminar"],
-      "key::107" : ["agricultural forestry", "af", "a f"],
-      "key::108" : ["agricultural mechanical", "am", "a m"],
-      "key::109" : ["catholic", "catholique", "katholische", "catolica", "cattolica", "catolico"]
-    }
+    "blacklists" : {},
+    "synonyms": {}
  }
}
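Note: the rewritten tree above renames the opaque layer2..layer5 stages to descriptive ones (necessaryConditions, cityCheck, keywordCheck, nameCheck) and replaces the window-based cityMatch/keywordMatch comparators with a single codeMatch comparator driven by a codeRegex parameter. The intent visible from the configuration is that the new "infer" : "city_keyword" model fields decorate the legal name with normalized codes such as city::123 and key::45, which the comparator extracts and compares. The following is purely an illustrative sketch of that behavior, not the actual dhp-pace-core implementation:

import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public final class CodeMatchSketch {

    private CodeMatchSketch() {
    }

    private static Set<String> codes(String value, Pattern regex) {
        Set<String> out = new HashSet<>();
        Matcher m = regex.matcher(value);
        while (m.find()) {
            out.add(m.group());
        }
        return out;
    }

    // assumed convention: 1 when the extracted code sets coincide, 0 when they
    // differ, -1 (undefined) when either side carries no codes at all
    public static double compare(String a, String b, String codeRegex) {
        Pattern p = Pattern.compile(codeRegex);
        Set<String> ca = codes(a, p);
        Set<String> cb = codes(b, p);
        if (ca.isEmpty() || cb.isEmpty()) {
            return -1;
        }
        return ca.equals(cb) ? 1 : 0;
    }
}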
File diff suppressed because one or more lines are too long
@@ -33,10 +33,7 @@ import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.bulktag.community.*;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.common.ModelSupport;
-import eu.dnetlib.dhp.schema.oaf.Context;
-import eu.dnetlib.dhp.schema.oaf.Datasource;
-import eu.dnetlib.dhp.schema.oaf.Project;
-import eu.dnetlib.dhp.schema.oaf.Result;
+import eu.dnetlib.dhp.schema.oaf.*;
 import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
 import scala.Tuple2;

@@ -114,27 +111,35 @@ public class SparkBulkTagJob {
                extendCommunityConfigurationForEOSC(spark, inputPath, cc);
                execBulkTag(
                    spark, inputPath, outputPath, protoMap, cc);
+                execEntityTag(
+                    spark, inputPath + "organization", outputPath + "organization",
+                    Utils.getCommunityOrganization(baseURL), Organization.class, TaggingConstants.CLASS_ID_ORGANIZATION,
+                    TaggingConstants.CLASS_NAME_BULKTAG_ORGANIZATION);
+                execEntityTag(
+                    spark, inputPath + "project", outputPath + "project", Utils.getCommunityProjects(baseURL),
+                    Project.class, TaggingConstants.CLASS_ID_PROJECT, TaggingConstants.CLASS_NAME_BULKTAG_PROJECT);
                execDatasourceTag(spark, inputPath, outputPath, Utils.getDatasourceCommunities(baseURL));
-                execProjectTag(spark, inputPath, outputPath, Utils.getCommunityProjects(baseURL));
            });
    }

-    private static void execProjectTag(SparkSession spark, String inputPath, String outputPath,
-        CommunityEntityMap communityProjects) {
-        Dataset<Project> projects = readPath(spark, inputPath + "project", Project.class);
+    private static <E extends OafEntity> void execEntityTag(SparkSession spark, String inputPath, String outputPath,
+        CommunityEntityMap communityEntity, Class<E> entityClass,
+        String classID, String calssName) {
+        Dataset<E> entity = readPath(spark, inputPath, entityClass);
        Dataset<EntityCommunities> pc = spark
            .createDataset(
-                communityProjects
+                communityEntity
                    .keySet()
                    .stream()
-                    .map(k -> EntityCommunities.newInstance(k, communityProjects.get(k)))
+                    .map(k -> EntityCommunities.newInstance(k, communityEntity.get(k)))
                    .collect(Collectors.toList()),
                Encoders.bean(EntityCommunities.class));

-        projects
-            .joinWith(pc, projects.col("id").equalTo(pc.col("entityId")), "left")
-            .map((MapFunction<Tuple2<Project, EntityCommunities>, Project>) t2 -> {
-                Project ds = t2._1();
+        entity
+            .joinWith(pc, entity.col("id").equalTo(pc.col("entityId")), "left")
+            .map((MapFunction<Tuple2<E, EntityCommunities>, E>) t2 -> {
+                E ds = t2._1();
                if (t2._2() != null) {
                    List<String> context = Optional
                        .ofNullable(ds.getContext())
@@ -156,8 +161,8 @@ public class SparkBulkTagJob {
                            false, TaggingConstants.BULKTAG_DATA_INFO_TYPE, true, false,
                            OafMapperUtils
                                .qualifier(
-                                    TaggingConstants.CLASS_ID_DATASOURCE,
-                                    TaggingConstants.CLASS_NAME_BULKTAG_DATASOURCE,
+                                    classID,
+                                    calssName,
                                    ModelConstants.DNET_PROVENANCE_ACTIONS,
                                    ModelConstants.DNET_PROVENANCE_ACTIONS),
                            "1")));
@@ -166,17 +171,17 @@ public class SparkBulkTagJob {
                });
            }
            return ds;
-        }, Encoders.bean(Project.class))
+        }, Encoders.bean(entityClass))
            .write()
            .mode(SaveMode.Overwrite)
            .option("compression", "gzip")
-            .json(outputPath + "project");
+            .json(outputPath);

-        readPath(spark, outputPath + "project", Project.class)
+        readPath(spark, outputPath, entityClass)
            .write()
            .mode(SaveMode.Overwrite)
            .option("compression", "gzip")
-            .json(inputPath + "project");
+            .json(inputPath);
    }

    private static void execDatasourceTag(SparkSession spark, String inputPath, String outputPath,
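Note: the refactoring above folds the project-specific tagger into a generic execEntityTag that serves organizations and projects alike by threading a Class<E> token through Spark's bean encoders. The enabling pattern, reading and writing a typed Dataset generically, can be sketched independently of the dhp codebase (helper and method names here are assumptions):

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

public final class GenericDatasetSketch {

    private GenericDatasetSketch() {
    }

    // one reader serves every bean type: the Class token drives the encoder
    public static <E> Dataset<E> readPath(SparkSession spark, String path, Class<E> clazz) {
        return spark.read().json(path).as(Encoders.bean(clazz));
    }

    public static <E> void tagAndWrite(SparkSession spark, String in, String out, Class<E> clazz) {
        readPath(spark, in, clazz)
            .map((MapFunction<E, E>) e -> e, Encoders.bean(clazz)) // identity stands in for the tagging step
            .write()
            .mode("overwrite")
            .option("compression", "gzip")
            .json(out);
    }
}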
Some files were not shown because too many files have changed in this diff.