forked from D-Net/dnet-hadoop
merging with branch beta
commit 8448b9ebfb
@@ -26,3 +26,4 @@ spark-warehouse
 /**/*.log
 /**/.factorypath
 /**/.scalafmt.conf
+/.java-version
@@ -51,6 +51,7 @@ public class Constants {
 	public static final String RETRY_DELAY = "retryDelay";
 	public static final String CONNECT_TIMEOUT = "connectTimeOut";
 	public static final String READ_TIMEOUT = "readTimeOut";
+	public static final String REQUEST_METHOD = "requestMethod";
 	public static final String FROM_DATE_OVERRIDE = "fromDateOverride";
 	public static final String UNTIL_DATE_OVERRIDE = "untilDateOverride";
@@ -1,6 +1,9 @@

 package eu.dnetlib.dhp.common.collection;

+import java.util.HashMap;
+import java.util.Map;
+
 /**
  * Bundles the http connection parameters driving the client behaviour.
  */
@@ -13,6 +16,8 @@ public class HttpClientParams {
 	public static int _connectTimeOut = 10; // seconds
 	public static int _readTimeOut = 30; // seconds

+	public static String _requestMethod = "GET";
+
 	/**
 	 * Maximum number of allowed retires before failing
 	 */
@@ -38,17 +43,30 @@ public class HttpClientParams {
 	 */
 	private int readTimeOut;

+	/**
+	 * Custom http headers
+	 */
+	private Map<String, String> headers;
+
+	/**
+	 * Request method (i.e., GET, POST etc)
+	 */
+	private String requestMethod;
+
 	public HttpClientParams() {
-		this(_maxNumberOfRetry, _requestDelay, _retryDelay, _connectTimeOut, _readTimeOut);
+		this(_maxNumberOfRetry, _requestDelay, _retryDelay, _connectTimeOut, _readTimeOut, new HashMap<>(),
+			_requestMethod);
 	}

 	public HttpClientParams(int maxNumberOfRetry, int requestDelay, int retryDelay, int connectTimeOut,
-		int readTimeOut) {
+		int readTimeOut, Map<String, String> headers, String requestMethod) {
 		this.maxNumberOfRetry = maxNumberOfRetry;
 		this.requestDelay = requestDelay;
 		this.retryDelay = retryDelay;
 		this.connectTimeOut = connectTimeOut;
 		this.readTimeOut = readTimeOut;
+		this.headers = headers;
+		this.requestMethod = requestMethod;
 	}

 	public int getMaxNumberOfRetry() {
@@ -91,4 +109,19 @@ public class HttpClientParams {
 		this.readTimeOut = readTimeOut;
 	}

+	public Map<String, String> getHeaders() {
+		return headers;
+	}
+
+	public void setHeaders(Map<String, String> headers) {
+		this.headers = headers;
+	}
+
+	public String getRequestMethod() {
+		return requestMethod;
+	}
+
+	public void setRequestMethod(String requestMethod) {
+		this.requestMethod = requestMethod;
+	}
 }
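Review note: a minimal sketch of how a caller would drive the extended HttpClientParams; the header values, timeouts, and retry counts below are illustrative, not part of this commit.

import java.util.HashMap;
import java.util.Map;

import eu.dnetlib.dhp.common.collection.HttpClientParams;

public class HttpClientParamsSketch {

	public static void main(String[] args) {
		// the no-arg constructor now delegates to the full one, filling in an
		// empty header map and the default "GET" request method
		HttpClientParams defaults = new HttpClientParams();
		System.out.println(defaults.getRequestMethod()); // GET
		System.out.println(defaults.getHeaders().isEmpty()); // true

		// explicit construction with custom headers and a non-default method;
		// the token below is a placeholder, not a real credential
		Map<String, String> headers = new HashMap<>();
		headers.put("Authorization", "Bearer <token>");
		headers.put("Accept", "application/json");
		HttpClientParams custom = new HttpClientParams(3, 10, 10, 10, 30, headers, "POST");
		System.out.println(custom.getHeaders().size()); // 2
	}
}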
@@ -107,7 +107,14 @@ public class HttpConnector2 {
 			urlConn.setReadTimeout(getClientParams().getReadTimeOut() * 1000);
 			urlConn.setConnectTimeout(getClientParams().getConnectTimeOut() * 1000);
 			urlConn.addRequestProperty(HttpHeaders.USER_AGENT, userAgent);
+			urlConn.setRequestMethod(getClientParams().getRequestMethod());

+			// if provided, add custom headers
+			if (!getClientParams().getHeaders().isEmpty()) {
+				for (Map.Entry<String, String> headerEntry : getClientParams().getHeaders().entrySet()) {
+					urlConn.addRequestProperty(headerEntry.getKey(), headerEntry.getValue());
+				}
+			}
 			if (log.isDebugEnabled()) {
 				logHeaderFields(urlConn);
 			}
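Review note: HttpURLConnection.setRequestMethod, which the connector now calls with the configured value, accepts only the standard HTTP verbs and throws ProtocolException otherwise, so an invalid requestMethod fails fast at connection setup. A small standalone sketch:

import java.net.HttpURLConnection;
import java.net.ProtocolException;
import java.net.URL;

public class RequestMethodSketch {

	public static void main(String[] args) throws Exception {
		HttpURLConnection conn = (HttpURLConnection) new URL("http://example.org").openConnection();
		conn.setRequestMethod("POST"); // accepted

		try {
			conn.setRequestMethod("FETCH"); // not a standard HTTP verb
		} catch (ProtocolException e) {
			System.out.println("rejected: " + e.getMessage());
		}
	}
}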
@@ -36,6 +36,19 @@ public class GraphCleaningFunctions extends CleaningFunctions {

 	public static final int TITLE_FILTER_RESIDUAL_LENGTH = 5;
 	private static final String NAME_CLEANING_REGEX = "[\\r\\n\\t\\s]+";
+	private static final HashSet<String> PEER_REVIEWED_TYPES = new HashSet<>();
+
+	static {
+		PEER_REVIEWED_TYPES.add("Article");
+		PEER_REVIEWED_TYPES.add("Part of book or chapter of book");
+		PEER_REVIEWED_TYPES.add("Book");
+		PEER_REVIEWED_TYPES.add("Doctoral thesis");
+		PEER_REVIEWED_TYPES.add("Master thesis");
+		PEER_REVIEWED_TYPES.add("Data Paper");
+		PEER_REVIEWED_TYPES.add("Thesis");
+		PEER_REVIEWED_TYPES.add("Bachelor thesis");
+		PEER_REVIEWED_TYPES.add("Conference object");
+	}

 	public static <T extends Oaf> T cleanContext(T value, String contextId, String verifyParam) {
 		if (ModelSupport.isSubClass(value, Result.class)) {
@@ -493,6 +506,28 @@ public class GraphCleaningFunctions extends CleaningFunctions {
 					if (Objects.isNull(i.getRefereed()) || StringUtils.isBlank(i.getRefereed().getClassid())) {
 						i.setRefereed(qualifier("0000", "Unknown", ModelConstants.DNET_REVIEW_LEVELS));
 					}
+
+					// from the script from Dimitris
+					if ("0000".equals(i.getRefereed().getClassid())) {
+						final boolean isFromCrossref = ModelConstants.CROSSREF_ID
+							.equals(i.getCollectedfrom().getKey());
+						final boolean hasDoi = i
+							.getPid()
+							.stream()
+							.anyMatch(pid -> PidType.doi.toString().equals(pid.getQualifier().getClassid()));
+						final boolean isPeerReviewedType = PEER_REVIEWED_TYPES
+							.contains(i.getInstancetype().getClassname());
+						final boolean noOtherLitType = r
+							.getInstance()
+							.stream()
+							.noneMatch(ii -> "Other literature type".equals(ii.getInstancetype().getClassname()));
+						if (isFromCrossref && hasDoi && isPeerReviewedType && noOtherLitType) {
+							i.setRefereed(qualifier("0001", "peerReviewed", ModelConstants.DNET_REVIEW_LEVELS));
+						} else {
+							i.setRefereed(qualifier("0002", "nonPeerReviewed", ModelConstants.DNET_REVIEW_LEVELS));
+						}
+					}
+
 					if (Objects.nonNull(i.getDateofacceptance())) {
 						Optional<String> date = cleanDateField(i.getDateofacceptance());
 						if (date.isPresent()) {
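Review note: condensing the added heuristic for readers — an instance whose review level is still "0000" (Unknown) is promoted to "0001" (peerReviewed) only when all four flags hold, and demoted to "0002" (nonPeerReviewed) otherwise. The helper below is a paraphrase of the diff, not additional behaviour:

public class ReviewLevelSketch {

	// isFromCrossref: collected from Crossref; hasDoi: at least one DOI pid;
	// isPeerReviewedType: instance type in PEER_REVIEWED_TYPES;
	// noOtherLitType: no sibling instance typed "Other literature type"
	static String reviewLevel(boolean isFromCrossref, boolean hasDoi,
		boolean isPeerReviewedType, boolean noOtherLitType) {
		return (isFromCrossref && hasDoi && isPeerReviewedType && noOtherLitType)
			? "0001" // peerReviewed
			: "0002"; // nonPeerReviewed
	}

	public static void main(String[] args) {
		System.out.println(reviewLevel(true, true, true, true)); // 0001
		System.out.println(reviewLevel(true, true, false, true)); // 0002
	}
}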
@@ -7,7 +7,7 @@ import java.util.regex.Pattern;
 // https://researchguides.stevens.edu/c.php?g=442331&p=6577176
 public class PmidCleaningRule {

-	public static final Pattern PATTERN = Pattern.compile("[1-9]{1,8}");
+	public static final Pattern PATTERN = Pattern.compile("0*(\\d{1,8})");

 	public static String clean(String pmid) {
 		String s = pmid
@@ -17,7 +17,7 @@ public class PmidCleaningRule {
 		final Matcher m = PATTERN.matcher(s);

 		if (m.find()) {
-			return m.group();
+			return m.group(1);
 		}
 		return "";
 	}
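Review note: the regex swap is the substance of this fix. The old pattern [1-9]{1,8} could not match the digit 0 at all, so a valid pmid such as 20794075 was truncated at its first zero; the new pattern consumes leading zeros with 0* and captures the significant digits in group 1 (hence the m.group(1) change above). A standalone demonstration:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PmidPatternSketch {

	public static void main(String[] args) {
		Pattern oldPattern = Pattern.compile("[1-9]{1,8}");
		Pattern newPattern = Pattern.compile("0*(\\d{1,8})");

		// old behaviour: the first match on "20794075" stops before the '0'
		Matcher m1 = oldPattern.matcher("20794075");
		if (m1.find()) {
			System.out.println(m1.group()); // 2
		}

		// new behaviour: leading zeros stripped, embedded zeros kept
		Matcher m2 = newPattern.matcher("0020794075");
		if (m2.find()) {
			System.out.println(m2.group(1)); // 20794075
		}
	}
}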
@@ -9,10 +9,16 @@ class PmidCleaningRuleTest {

 	@Test
 	void testCleaning() {
+		// leading zeros are removed
 		assertEquals("1234", PmidCleaningRule.clean("01234"));
+		// tolerant to spaces in the middle
 		assertEquals("1234567", PmidCleaningRule.clean("0123 4567"));
+		// stop parsing at first not numerical char
 		assertEquals("123", PmidCleaningRule.clean("0123x4567"));
+		// invalid id leading to empty result
 		assertEquals("", PmidCleaningRule.clean("abc"));
+		// valid id with zeroes in the number
+		assertEquals("20794075", PmidCleaningRule.clean("20794075"));
 	}

 }
@@ -7,9 +7,11 @@ import java.io.IOException;
 import java.io.Serializable;
 import java.util.*;

+import eu.dnetlib.dhp.schema.oaf.utils.*;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaPairRDD;
@@ -27,23 +29,28 @@ import eu.dnetlib.dhp.actionmanager.opencitations.model.COCI;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.schema.action.AtomicAction;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
-import eu.dnetlib.dhp.schema.common.ModelSupport;
 import eu.dnetlib.dhp.schema.oaf.*;
-import eu.dnetlib.dhp.schema.oaf.utils.CleaningFunctions;
-import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
-import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
 import eu.dnetlib.dhp.utils.DHPUtils;
 import scala.Tuple2;

 public class CreateActionSetSparkJob implements Serializable {
 	public static final String OPENCITATIONS_CLASSID = "sysimport:crosswalk:opencitations";
 	public static final String OPENCITATIONS_CLASSNAME = "Imported from OpenCitations";

+	// DOI-to-DOI citations
+	public static final String COCI = "COCI";
+
+	// PMID-to-PMID citations
+	public static final String POCI = "POCI";
+
 	private static final String DOI_PREFIX = "50|doi_________::";

 	private static final String PMID_PREFIX = "50|pmid________::";

 	private static final String TRUST = "0.91";

 	private static final Logger log = LoggerFactory.getLogger(CreateActionSetSparkJob.class);

 	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

 	public static void main(final String[] args) throws IOException, ParseException {
@@ -67,7 +74,7 @@ public class CreateActionSetSparkJob implements Serializable {
 		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

 		final String inputPath = parser.get("inputPath");
-		log.info("inputPath {}", inputPath.toString());
+		log.info("inputPath {}", inputPath);

 		final String outputPath = parser.get("outputPath");
 		log.info("outputPath {}", outputPath);
@@ -81,19 +88,16 @@ public class CreateActionSetSparkJob implements Serializable {
 		runWithSparkSession(
 			conf,
 			isSparkSessionManaged,
-			spark -> {
-				extractContent(spark, inputPath, outputPath, shouldDuplicateRels);
-
-			});
+			spark -> extractContent(spark, inputPath, outputPath, shouldDuplicateRels));

 	}

 	private static void extractContent(SparkSession spark, String inputPath, String outputPath,
 		boolean shouldDuplicateRels) {

-		getTextTextJavaPairRDD(spark, inputPath, shouldDuplicateRels, "COCI")
-			.union(getTextTextJavaPairRDD(spark, inputPath, shouldDuplicateRels, "POCI"))
-			.saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class);
+		getTextTextJavaPairRDD(spark, inputPath, shouldDuplicateRels, COCI)
+			.union(getTextTextJavaPairRDD(spark, inputPath, shouldDuplicateRels, POCI))
+			.saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, GzipCodec.class);

 	}

 	private static JavaPairRDD<Text, Text> getTextTextJavaPairRDD(SparkSession spark, String inputPath,
@@ -109,7 +113,7 @@ public class CreateActionSetSparkJob implements Serializable {
 						value, shouldDuplicateRels, prefix)
 						.iterator(),
 				Encoders.bean(Relation.class))
-			.filter((FilterFunction<Relation>) value -> value != null)
+			.filter((FilterFunction<Relation>) Objects::nonNull)
 			.toJavaRDD()
 			.map(p -> new AtomicAction(p.getClass(), p))
 			.mapToPair(
@@ -123,20 +127,28 @@ public class CreateActionSetSparkJob implements Serializable {
 		String prefix;
 		String citing;
 		String cited;
-		if (p.equals("COCI")) {
+
+		switch (p) {
+			case COCI:
 				prefix = DOI_PREFIX;
 				citing = prefix
-					+ IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", value.getCiting()));
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.doi.toString(), value.getCiting()));
 				cited = prefix
-					+ IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", value.getCited()));
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.doi.toString(), value.getCited()));
-		} else {
+				break;
+			case POCI:
 				prefix = PMID_PREFIX;
 				citing = prefix
-					+ IdentifierFactory.md5(CleaningFunctions.normalizePidValue("pmid", value.getCiting()));
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.pmid.toString(), value.getCiting()));
 				cited = prefix
-					+ IdentifierFactory.md5(CleaningFunctions.normalizePidValue("pmid", value.getCited()));
+					+ IdentifierFactory
+						.md5(PidCleaner.normalizePidValue(PidType.pmid.toString(), value.getCited()));
+				break;
+			default:
+				throw new IllegalStateException("Invalid prefix: " + p);
 		}

 		if (!citing.equals(cited)) {
@@ -162,7 +174,7 @@ public class CreateActionSetSparkJob implements Serializable {
 	public static Relation getRelation(
 		String source,
 		String target,
-		String relclass) {
+		String relClass) {

 		return OafMapperUtils
 			.getRelation(
@@ -170,7 +182,7 @@ public class CreateActionSetSparkJob implements Serializable {
 				target,
 				ModelConstants.RESULT_RESULT,
 				ModelConstants.CITATION,
-				relclass,
+				relClass,
 				Arrays
 					.asList(
 						OafMapperUtils.keyValue(ModelConstants.OPENOCITATIONS_ID, ModelConstants.OPENOCITATIONS_NAME)),
@@ -183,6 +195,6 @@ public class CreateActionSetSparkJob implements Serializable {
 					ModelConstants.DNET_PROVENANCE_ACTIONS, ModelConstants.DNET_PROVENANCE_ACTIONS),
 				TRUST),
 			null);
+	}

 	}
-}
@@ -3,6 +3,7 @@ package eu.dnetlib.dhp.actionmanager.opencitations;

 import java.io.*;
 import java.io.Serializable;
+import java.util.Arrays;
 import java.util.Objects;
 import java.util.zip.GZIPOutputStream;
 import java.util.zip.ZipEntry;
@@ -37,7 +38,7 @@ public class GetOpenCitationsRefs implements Serializable {
 		parser.parseArgument(args);

 		final String[] inputFile = parser.get("inputFile").split(";");
-		log.info("inputFile {}", inputFile.toString());
+		log.info("inputFile {}", Arrays.asList(inputFile));

 		final String workingPath = parser.get("workingPath");
 		log.info("workingPath {}", workingPath);
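Review note: this logging fix (also applied in ReadCOCI below) addresses a classic Java pitfall — toString() on an array prints the type and identity hash, not the elements; wrapping the array in Arrays.asList yields a readable listing:

import java.util.Arrays;

public class ArrayToStringSketch {

	public static void main(String[] args) {
		String[] inputFile = "a.gz;b.gz".split(";");

		System.out.println(inputFile.toString());     // [Ljava.lang.String;@1b6d3586 (or similar)
		System.out.println(Arrays.asList(inputFile)); // [a.gz, b.gz]
	}
}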
@@ -7,6 +7,7 @@ import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;

 import java.io.IOException;
 import java.io.Serializable;
+import java.util.Arrays;
 import java.util.Optional;

 import org.apache.commons.io.IOUtils;
@@ -42,7 +43,7 @@ public class ReadCOCI implements Serializable {
 		log.info("outputPath: {}", outputPath);

 		final String[] inputFile = parser.get("inputFile").split(";");
-		log.info("inputFile {}", inputFile.toString());
+		log.info("inputFile {}", Arrays.asList(inputFile));
 		Boolean isSparkSessionManaged = isSparkSessionManaged(parser);
 		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);
@@ -74,10 +75,10 @@ public class ReadCOCI implements Serializable {

 	private static void doRead(SparkSession spark, String workingPath, String[] inputFiles,
 		String outputPath,
-		String delimiter, String format) throws IOException {
+		String delimiter, String format) {

 		for (String inputFile : inputFiles) {
-			String p_string = workingPath + "/" + inputFile + ".gz";
+			String pString = workingPath + "/" + inputFile + ".gz";

 			Dataset<Row> cociData = spark
 				.read()
@@ -86,7 +87,7 @@ public class ReadCOCI implements Serializable {
 				.option("inferSchema", "true")
 				.option("header", "true")
 				.option("quotes", "\"")
-				.load(p_string)
+				.load(pString)
 				.repartition(100);

 			cociData.map((MapFunction<Row, COCI>) row -> {
@@ -16,15 +16,11 @@
     "paramLongName": "isSparkSessionManaged",
     "paramDescription": "the hdfs name node",
    "paramRequired": false
-  }, {
+  },
+  {
     "paramName": "sdr",
     "paramLongName": "shouldDuplicateRels",
-    "paramDescription": "the hdfs name node",
+    "paramDescription": "activates/deactivates the construction of bidirectional relations Cites/IsCitedBy",
     "paramRequired": false
-  },{
-    "paramName": "p",
-    "paramLongName": "prefix",
-    "paramDescription": "the hdfs name node",
-    "paramRequired": true
   }
 ]
@@ -34,6 +34,7 @@
 	<kill name="Kill">
 		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
 	</kill>
+
 	<action name="download">
 		<shell xmlns="uri:oozie:shell-action:0.2">
 			<job-tracker>${jobTracker}</job-tracker>
@@ -54,6 +55,7 @@
 		<ok to="extract"/>
 		<error to="Kill"/>
 	</action>
+
 	<action name="extract">
 		<java>
 			<main-class>eu.dnetlib.dhp.actionmanager.opencitations.GetOpenCitationsRefs</main-class>
@@ -112,7 +114,6 @@
 			</spark-opts>
 			<arg>--inputPath</arg><arg>${workingPath}</arg>
 			<arg>--outputPath</arg><arg>${outputPath}</arg>
-			<arg>--prefix</arg><arg>${prefix}</arg>
 		</spark>
 		<ok to="End"/>
 		<error to="Kill"/>
@@ -0,0 +1,940 @@
+[
+  {
+    "id": "100007630",
+    "uri": "http://dx.doi.org/10.13039/100007630",
+    "name": "College of Engineering and Informatics, National University of Ireland, Galway",
+    "synonym": []
+  },
+  {
+    "id": "100007731",
+    "uri": "http://dx.doi.org/10.13039/100007731",
+    "name": "Endo International",
+    "synonym": []
+  },
+  {
+    "id": "100008099",
+    "uri": "http://dx.doi.org/10.13039/100008099",
+    "name": "Food Safety Authority of Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100008124",
+    "uri": "http://dx.doi.org/10.13039/100008124",
+    "name": "Department of Jobs, Enterprise and Innovation",
+    "synonym": []
+  },
+  {
+    "id": "100009098",
+    "uri": "http://dx.doi.org/10.13039/100009098",
+    "name": "Department of Foreign Affairs and Trade, Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100009099",
+    "uri": "http://dx.doi.org/10.13039/100009099",
+    "name": "Irish Aid",
+    "synonym": []
+  },
+  {
+    "id": "100009770",
+    "uri": "http://dx.doi.org/10.13039/100009770",
+    "name": "National University of Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100009985",
+    "uri": "http://dx.doi.org/10.13039/100009985",
+    "name": "Parkinson's Association of Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100010399",
+    "uri": "http://dx.doi.org/10.13039/100010399",
+    "name": "European Society of Cataract and Refractive Surgeons",
+    "synonym": []
+  },
+  {
+    "id": "100010414",
+    "uri": "http://dx.doi.org/10.13039/100010414",
+    "name": "Health Research Board",
+    "synonym": [
+      "501100001590"
+    ]
+  },
+  {
+    "id": "100010546",
+    "uri": "http://dx.doi.org/10.13039/100010546",
+    "name": "Deparment of Children and Youth Affairs, Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100010993",
+    "uri": "http://dx.doi.org/10.13039/100010993",
+    "name": "Irish Nephrology Society",
+    "synonym": []
+  },
+  {
+    "id": "100011062",
+    "uri": "http://dx.doi.org/10.13039/100011062",
+    "name": "Asian Spinal Cord Network",
+    "synonym": []
+  },
+  {
+    "id": "100011096",
+    "uri": "http://dx.doi.org/10.13039/100011096",
+    "name": "Jazz Pharmaceuticals",
+    "synonym": []
+  },
+  {
+    "id": "100011396",
+    "uri": "http://dx.doi.org/10.13039/100011396",
+    "name": "Irish College of General Practitioners",
+    "synonym": []
+  },
+  {
+    "id": "100012734",
+    "uri": "http://dx.doi.org/10.13039/100012734",
+    "name": "Department for Culture, Heritage and the Gaeltacht, Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100012754",
+    "uri": "http://dx.doi.org/10.13039/100012754",
+    "name": "Horizon Pharma",
+    "synonym": []
+  },
+  {
+    "id": "100012891",
+    "uri": "http://dx.doi.org/10.13039/100012891",
+    "name": "Medical Research Charities Group",
+    "synonym": []
+  },
+  {
+    "id": "100012919",
+    "uri": "http://dx.doi.org/10.13039/100012919",
+    "name": "Epilepsy Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100012920",
+    "uri": "http://dx.doi.org/10.13039/100012920",
+    "name": "GLEN",
+    "synonym": []
+  },
+  {
+    "id": "100012921",
+    "uri": "http://dx.doi.org/10.13039/100012921",
+    "name": "Royal College of Surgeons in Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100013029",
+    "uri": "http://dx.doi.org/10.13039/100013029",
+    "name": "Iris O'Brien Foundation",
+    "synonym": []
+  },
+  {
+    "id": "100013206",
+    "uri": "http://dx.doi.org/10.13039/100013206",
+    "name": "Food Institutional Research Measure",
+    "synonym": []
+  },
+  {
+    "id": "100013381",
+    "uri": "http://dx.doi.org/10.13039/100013381",
+    "name": "Irish Phytochemical Food Network",
+    "synonym": []
+  },
+  {
+    "id": "100013433",
+    "uri": "http://dx.doi.org/10.13039/100013433",
+    "name": "Transport Infrastructure Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100013461",
+    "uri": "http://dx.doi.org/10.13039/100013461",
+    "name": "Arts and Disability Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100013548",
+    "uri": "http://dx.doi.org/10.13039/100013548",
+    "name": "Filmbase",
+    "synonym": []
+  },
+  {
+    "id": "100013917",
+    "uri": "http://dx.doi.org/10.13039/100013917",
+    "name": "Society for Musicology in Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100014251",
+    "uri": "http://dx.doi.org/10.13039/100014251",
+    "name": "Humanities in the European Research Area",
+    "synonym": []
+  },
+  {
+    "id": "100014364",
+    "uri": "http://dx.doi.org/10.13039/100014364",
+    "name": "National Children's Research Centre",
+    "synonym": []
+  },
+  {
+    "id": "100014384",
+    "uri": "http://dx.doi.org/10.13039/100014384",
+    "name": "Amarin Corporation",
+    "synonym": []
+  },
+  {
+    "id": "100014902",
+    "uri": "http://dx.doi.org/10.13039/100014902",
+    "name": "Irish Association for Cancer Research",
+    "synonym": []
+  },
+  {
+    "id": "100015023",
+    "uri": "http://dx.doi.org/10.13039/100015023",
+    "name": "Ireland Funds",
+    "synonym": []
+  },
+  {
+    "id": "100015037",
+    "uri": "http://dx.doi.org/10.13039/100015037",
+    "name": "Simon Cumbers Media Fund",
+    "synonym": []
+  },
+  {
+    "id": "100015319",
+    "uri": "http://dx.doi.org/10.13039/100015319",
+    "name": "Sport Ireland Institute",
+    "synonym": []
+  },
+  {
+    "id": "100015320",
+    "uri": "http://dx.doi.org/10.13039/100015320",
+    "name": "Paralympics Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100015442",
+    "uri": "http://dx.doi.org/10.13039/100015442",
+    "name": "Global Brain Health Institute",
+    "synonym": []
+  },
+  {
+    "id": "100015776",
+    "uri": "http://dx.doi.org/10.13039/100015776",
+    "name": "Health and Social Care Board",
+    "synonym": []
+  },
+  {
+    "id": "100015992",
+    "uri": "http://dx.doi.org/10.13039/100015992",
+    "name": "St. Luke's Institute of Cancer Research",
+    "synonym": []
+  },
+  {
+    "id": "100017897",
+    "uri": "http://dx.doi.org/10.13039/100017897",
+    "name": "Friedreich\u2019s Ataxia Research Alliance Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100018064",
+    "uri": "http://dx.doi.org/10.13039/100018064",
+    "name": "Department of Tourism, Culture, Arts, Gaeltacht, Sport and Media",
+    "synonym": []
+  },
+  {
+    "id": "100018172",
+    "uri": "http://dx.doi.org/10.13039/100018172",
+    "name": "Department of the Environment, Climate and Communications",
+    "synonym": []
+  },
+  {
+    "id": "100018175",
+    "uri": "http://dx.doi.org/10.13039/100018175",
+    "name": "Dairy Processing Technology Centre",
+    "synonym": []
+  },
+  {
+    "id": "100018270",
+    "uri": "http://dx.doi.org/10.13039/100018270",
+    "name": "Health Service Executive",
+    "synonym": []
+  },
+  {
+    "id": "100018529",
+    "uri": "http://dx.doi.org/10.13039/100018529",
+    "name": "Alkermes",
+    "synonym": []
+  },
+  {
+    "id": "100018542",
+    "uri": "http://dx.doi.org/10.13039/100018542",
+    "name": "Irish Endocrine Society",
+    "synonym": []
+  },
+  {
+    "id": "100018754",
+    "uri": "http://dx.doi.org/10.13039/100018754",
+    "name": "An Roinn Sl\u00e1inte",
+    "synonym": []
+  },
+  {
+    "id": "100018998",
+    "uri": "http://dx.doi.org/10.13039/100018998",
+    "name": "Irish Research eLibrary",
+    "synonym": []
+  },
+  {
+    "id": "100019428",
+    "uri": "http://dx.doi.org/10.13039/100019428",
+    "name": "Nabriva Therapeutics",
+    "synonym": []
+  },
+  {
+    "id": "100019637",
+    "uri": "http://dx.doi.org/10.13039/100019637",
+    "name": "Horizon Therapeutics",
+    "synonym": []
+  },
+  {
+    "id": "100020174",
+    "uri": "http://dx.doi.org/10.13039/100020174",
+    "name": "Health Research Charities Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100020202",
+    "uri": "http://dx.doi.org/10.13039/100020202",
+    "name": "UCD Foundation",
+    "synonym": []
+  },
+  {
+    "id": "100020233",
+    "uri": "http://dx.doi.org/10.13039/100020233",
+    "name": "Ireland Canada University Foundation",
+    "synonym": []
+  },
+  {
+    "id": "100022943",
+    "uri": "http://dx.doi.org/10.13039/100022943",
+    "name": "National Cancer Registry Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100001581",
+    "uri": "http://dx.doi.org/10.13039/501100001581",
+    "name": "Arts Council of Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100001582",
+    "uri": "http://dx.doi.org/10.13039/501100001582",
+    "name": "Centre for Ageing Research and Development in Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100001583",
+    "uri": "http://dx.doi.org/10.13039/501100001583",
+    "name": "Cystinosis Foundation Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100001584",
+    "uri": "http://dx.doi.org/10.13039/501100001584",
+    "name": "Department of Agriculture, Food and the Marine, Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100001586",
+    "uri": "http://dx.doi.org/10.13039/501100001586",
+    "name": "Department of Education and Skills, Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100001587",
+    "uri": "http://dx.doi.org/10.13039/501100001587",
+    "name": "Economic and Social Research Institute",
+    "synonym": []
+  },
+  {
+    "id": "501100001588",
+    "uri": "http://dx.doi.org/10.13039/501100001588",
+    "name": "Enterprise Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100001589",
+    "uri": "http://dx.doi.org/10.13039/501100001589",
+    "name": "Environmental Protection Agency",
+    "synonym": []
+  },
+  {
+    "id": "501100001591",
+    "uri": "http://dx.doi.org/10.13039/501100001591",
+    "name": "Heritage Council",
+    "synonym": []
+  },
+  {
+    "id": "501100001592",
+    "uri": "http://dx.doi.org/10.13039/501100001592",
+    "name": "Higher Education Authority",
+    "synonym": []
+  },
+  {
+    "id": "501100001593",
+    "uri": "http://dx.doi.org/10.13039/501100001593",
+    "name": "Irish Cancer Society",
+    "synonym": []
+  },
+  {
+    "id": "501100001594",
+    "uri": "http://dx.doi.org/10.13039/501100001594",
+    "name": "Irish Heart Foundation",
+    "synonym": []
+  },
+  {
+    "id": "501100001595",
+    "uri": "http://dx.doi.org/10.13039/501100001595",
+    "name": "Irish Hospice Foundation",
+    "synonym": []
+  },
+  {
+    "id": "501100001596",
+    "uri": "http://dx.doi.org/10.13039/501100001596",
+    "name": "Irish Research Council for Science, Engineering and Technology",
+    "synonym": []
+  },
+  {
+    "id": "501100001597",
+    "uri": "http://dx.doi.org/10.13039/501100001597",
+    "name": "Irish Research Council for the Humanities and Social Sciences",
+    "synonym": []
+  },
+  {
+    "id": "501100001598",
+    "uri": "http://dx.doi.org/10.13039/501100001598",
+    "name": "Mental Health Commission",
+    "synonym": []
+  },
+  {
+    "id": "501100001600",
+    "uri": "http://dx.doi.org/10.13039/501100001600",
+    "name": "Research and Education Foundation, Sligo General Hospital",
+    "synonym": []
+  },
+  {
+    "id": "501100001601",
+    "uri": "http://dx.doi.org/10.13039/501100001601",
+    "name": "Royal Irish Academy",
+    "synonym": []
+  },
+  {
+    "id": "501100001603",
+    "uri": "http://dx.doi.org/10.13039/501100001603",
+    "name": "Sustainable Energy Authority of Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100001604",
+    "uri": "http://dx.doi.org/10.13039/501100001604",
+    "name": "Teagasc",
+    "synonym": []
+  },
+  {
+    "id": "501100001627",
+    "uri": "http://dx.doi.org/10.13039/501100001627",
+    "name": "Marine Institute",
+    "synonym": []
+  },
+  {
+    "id": "501100001628",
+    "uri": "http://dx.doi.org/10.13039/501100001628",
+    "name": "Central Remedial Clinic",
+    "synonym": []
+  },
+  {
+    "id": "501100001629",
+    "uri": "http://dx.doi.org/10.13039/501100001629",
+    "name": "Royal Dublin Society",
+    "synonym": []
+  },
+  {
+    "id": "501100001630",
+    "uri": "http://dx.doi.org/10.13039/501100001630",
+    "name": "Dublin Institute for Advanced Studies",
+    "synonym": []
+  },
+  {
+    "id": "501100001631",
+    "uri": "http://dx.doi.org/10.13039/501100001631",
+    "name": "University College Dublin",
+    "synonym": []
+  },
+  {
+    "id": "501100001633",
+    "uri": "http://dx.doi.org/10.13039/501100001633",
+    "name": "National University of Ireland, Maynooth",
+    "synonym": []
+  },
+  {
+    "id": "501100001634",
+    "uri": "http://dx.doi.org/10.13039/501100001634",
+    "name": "University of Galway",
+    "synonym": []
+  },
+  {
+    "id": "501100001635",
+    "uri": "http://dx.doi.org/10.13039/501100001635",
+    "name": "University of Limerick",
+    "synonym": []
+  },
+  {
+    "id": "501100001636",
+    "uri": "http://dx.doi.org/10.13039/501100001636",
+    "name": "University College Cork",
+    "synonym": []
+  },
+  {
+    "id": "501100001637",
+    "uri": "http://dx.doi.org/10.13039/501100001637",
+    "name": "Trinity College Dublin",
+    "synonym": []
+  },
+  {
+    "id": "501100001638",
+    "uri": "http://dx.doi.org/10.13039/501100001638",
+    "name": "Dublin City University",
+    "synonym": []
+  },
+  {
+    "id": "501100002081",
+    "uri": "http://dx.doi.org/10.13039/501100002081",
+    "name": "Irish Research Council",
+    "synonym": []
+  },
+  {
+    "id": "501100002736",
+    "uri": "http://dx.doi.org/10.13039/501100002736",
+    "name": "Covidien",
+    "synonym": []
+  },
+  {
+    "id": "501100002755",
+    "uri": "http://dx.doi.org/10.13039/501100002755",
+    "name": "Brennan and Company",
+    "synonym": []
+  },
+  {
+    "id": "501100002919",
+    "uri": "http://dx.doi.org/10.13039/501100002919",
+    "name": "Cork Institute of Technology",
+    "synonym": []
+  },
+  {
+    "id": "501100002959",
+    "uri": "http://dx.doi.org/10.13039/501100002959",
+    "name": "Dublin City Council",
+    "synonym": []
+  },
+  {
+    "id": "501100003036",
+    "uri": "http://dx.doi.org/10.13039/501100003036",
+    "name": "Perrigo Company Charitable Foundation",
+    "synonym": []
+  },
+  {
+    "id": "501100003037",
+    "uri": "http://dx.doi.org/10.13039/501100003037",
+    "name": "Elan",
+    "synonym": []
+  },
+  {
+    "id": "501100003496",
+    "uri": "http://dx.doi.org/10.13039/501100003496",
+    "name": "HeyStaks Technologies",
+    "synonym": []
+  },
+  {
+    "id": "501100003553",
+    "uri": "http://dx.doi.org/10.13039/501100003553",
+    "name": "Gaelic Athletic Association",
+    "synonym": []
+  },
+  {
+    "id": "501100003840",
+    "uri": "http://dx.doi.org/10.13039/501100003840",
+    "name": "Irish Institute of Clinical Neuroscience",
+    "synonym": []
+  },
+  {
+    "id": "501100003956",
+    "uri": "http://dx.doi.org/10.13039/501100003956",
+    "name": "Aspect Medical Systems",
+    "synonym": []
+  },
+  {
+    "id": "501100004162",
+    "uri": "http://dx.doi.org/10.13039/501100004162",
+    "name": "Meath Foundation",
+    "synonym": []
+  },
+  {
+    "id": "501100004210",
+    "uri": "http://dx.doi.org/10.13039/501100004210",
+    "name": "Our Lady's Children's Hospital, Crumlin",
+    "synonym": []
+  },
+  {
+    "id": "501100004321",
+    "uri": "http://dx.doi.org/10.13039/501100004321",
+    "name": "Shire",
+    "synonym": []
+  },
+  {
+    "id": "501100004981",
+    "uri": "http://dx.doi.org/10.13039/501100004981",
+    "name": "Athlone Institute of Technology",
+    "synonym": []
+  },
+  {
+    "id": "501100006518",
+    "uri": "http://dx.doi.org/10.13039/501100006518",
+    "name": "Department of Communications, Energy and Natural Resources, Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100006553",
+    "uri": "http://dx.doi.org/10.13039/501100006553",
+    "name": "Collaborative Centre for Applied Nanotechnology",
+    "synonym": []
+  },
+  {
+    "id": "501100006759",
+    "uri": "http://dx.doi.org/10.13039/501100006759",
+    "name": "CLARITY Centre for Sensor Web Technologies",
+    "synonym": []
+  },
+  {
+    "id": "501100009246",
+    "uri": "http://dx.doi.org/10.13039/501100009246",
+    "name": "Technological University Dublin",
+    "synonym": []
+  },
+  {
+    "id": "501100009269",
+    "uri": "http://dx.doi.org/10.13039/501100009269",
+    "name": "Programme of Competitive Forestry Research for Development",
+    "synonym": []
+  },
+  {
+    "id": "501100009315",
+    "uri": "http://dx.doi.org/10.13039/501100009315",
+    "name": "Cystinosis Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100010808",
+    "uri": "http://dx.doi.org/10.13039/501100010808",
+    "name": "Geological Survey of Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100011030",
+    "uri": "http://dx.doi.org/10.13039/501100011030",
+    "name": "Alimentary Glycoscience Research Cluster",
+    "synonym": []
+  },
+  {
+    "id": "501100011031",
+    "uri": "http://dx.doi.org/10.13039/501100011031",
+    "name": "Alimentary Health",
+    "synonym": []
+  },
+  {
+    "id": "501100011103",
+    "uri": "http://dx.doi.org/10.13039/501100011103",
+    "name": "Rann\u00eds",
+    "synonym": []
+  },
+  {
+    "id": "501100012354",
+    "uri": "http://dx.doi.org/10.13039/501100012354",
+    "name": "Inland Fisheries Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100014384",
+    "uri": "http://dx.doi.org/10.13039/501100014384",
+    "name": "X-Bolt Orthopaedics",
+    "synonym": []
+  },
+  {
+    "id": "501100014710",
+    "uri": "http://dx.doi.org/10.13039/501100014710",
+    "name": "PrecisionBiotics Group",
+    "synonym": []
+  },
+  {
+    "id": "501100014827",
+    "uri": "http://dx.doi.org/10.13039/501100014827",
+    "name": "Dormant Accounts Fund",
+    "synonym": []
+  },
+  {
+    "id": "501100016041",
+    "uri": "http://dx.doi.org/10.13039/501100016041",
+    "name": "St Vincents Anaesthesia Foundation",
+    "synonym": []
+  },
+  {
+    "id": "501100017501",
+    "uri": "http://dx.doi.org/10.13039/501100017501",
+    "name": "FotoNation",
+    "synonym": []
+  },
+  {
+    "id": "501100018641",
+    "uri": "http://dx.doi.org/10.13039/501100018641",
+    "name": "Dairy Research Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100018839",
+    "uri": "http://dx.doi.org/10.13039/501100018839",
+    "name": "Irish Centre for High-End Computing",
+    "synonym": []
+  },
+  {
+    "id": "501100019905",
+    "uri": "http://dx.doi.org/10.13039/501100019905",
+    "name": "Galway University Foundation",
+    "synonym": []
+  },
+  {
+    "id": "501100020036",
+    "uri": "http://dx.doi.org/10.13039/501100020036",
+    "name": "Dystonia Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100020221",
+    "uri": "http://dx.doi.org/10.13039/501100020221",
+    "name": "Irish Motor Neurone Disease Association",
+    "synonym": []
+  },
+  {
+    "id": "501100020270",
+    "uri": "http://dx.doi.org/10.13039/501100020270",
+    "name": "Advanced Materials and Bioengineering Research",
+    "synonym": []
+  },
+  {
+    "id": "501100020403",
+    "uri": "http://dx.doi.org/10.13039/501100020403",
+    "name": "Irish Composites Centre",
+    "synonym": []
+  },
+  {
+    "id": "501100020425",
+    "uri": "http://dx.doi.org/10.13039/501100020425",
+    "name": "Irish Thoracic Society",
+    "synonym": []
+  },
+  {
+    "id": "501100021102",
+    "uri": "http://dx.doi.org/10.13039/501100021102",
+    "name": "Waterford Institute of Technology",
+    "synonym": []
+  },
+  {
+    "id": "501100021110",
+    "uri": "http://dx.doi.org/10.13039/501100021110",
+    "name": "Irish MPS Society",
+    "synonym": []
+  },
+  {
+    "id": "501100021525",
+    "uri": "http://dx.doi.org/10.13039/501100021525",
+    "name": "Insight SFI Research Centre for Data Analytics",
+    "synonym": []
+  },
+  {
+    "id": "501100021694",
+    "uri": "http://dx.doi.org/10.13039/501100021694",
+    "name": "Elan Pharma International",
+    "synonym": []
+  },
+  {
+    "id": "501100021838",
+    "uri": "http://dx.doi.org/10.13039/501100021838",
+    "name": "Royal College of Physicians of Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100022542",
+    "uri": "http://dx.doi.org/10.13039/501100022542",
+    "name": "Breakthrough Cancer Research",
+    "synonym": []
+  },
+  {
+    "id": "501100022610",
+    "uri": "http://dx.doi.org/10.13039/501100022610",
+    "name": "Breast Cancer Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100022728",
+    "uri": "http://dx.doi.org/10.13039/501100022728",
+    "name": "Munster Technological University",
+    "synonym": []
+  },
+  {
+    "id": "501100022729",
+    "uri": "http://dx.doi.org/10.13039/501100022729",
+    "name": "Institute of Technology, Tralee",
+    "synonym": []
+  },
+  {
+    "id": "501100023273",
+    "uri": "http://dx.doi.org/10.13039/501100023273",
+    "name": "HRB Clinical Research Facility Galway",
+    "synonym": []
+  },
+  {
+    "id": "501100023378",
+    "uri": "http://dx.doi.org/10.13039/501100023378",
+    "name": "Lauritzson Foundation",
+    "synonym": []
+  },
+  {
+    "id": "501100023551",
+    "uri": "http://dx.doi.org/10.13039/501100023551",
+    "name": "Cystic Fibrosis Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100023970",
+    "uri": "http://dx.doi.org/10.13039/501100023970",
+    "name": "Tyndall National Institute",
+    "synonym": []
+  },
+  {
+    "id": "501100024094",
+    "uri": "http://dx.doi.org/10.13039/501100024094",
+    "name": "Raidi\u00f3 Teilif\u00eds \u00c9ireann",
+    "synonym": []
+  },
+  {
+    "id": "501100024242",
+    "uri": "http://dx.doi.org/10.13039/501100024242",
+    "name": "Synthesis and Solid State Pharmaceutical Centre",
+    "synonym": []
+  },
+  {
+    "id": "501100024313",
+    "uri": "http://dx.doi.org/10.13039/501100024313",
+    "name": "Irish Rugby Football Union",
+    "synonym": []
+  },
+  {
+    "id": "100007490",
+    "uri": "http://dx.doi.org/10.13039/100007490",
+    "name": "Bausch and Lomb Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100007819",
+    "uri": "http://dx.doi.org/10.13039/100007819",
+    "name": "Allergan",
+    "synonym": []
+  },
+  {
+    "id": "100010547",
+    "uri": "http://dx.doi.org/10.13039/100010547",
+    "name": "Irish Youth Justice Service",
+    "synonym": []
+  },
+  {
+    "id": "100012733",
+    "uri": "http://dx.doi.org/10.13039/100012733",
+    "name": "National Parks and Wildlife Service",
+    "synonym": []
+  },
+  {
+    "id": "100015278",
+    "uri": "http://dx.doi.org/10.13039/100015278",
+    "name": "Pfizer Healthcare Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100017144",
+    "uri": "http://dx.doi.org/10.13039/100017144",
+    "name": "Shell E and P Ireland",
+    "synonym": []
+  },
+  {
+    "id": "100022895",
+    "uri": "http://dx.doi.org/10.13039/100022895",
+    "name": "Health Research Institute, University of Limerick",
+    "synonym": []
+  },
+  {
+    "id": "501100001599",
+    "uri": "http://dx.doi.org/10.13039/501100001599",
+    "name": "National Council for Forest Research and Development",
+    "synonym": []
+  },
+  {
+    "id": "501100006554",
+    "uri": "http://dx.doi.org/10.13039/501100006554",
+    "name": "IDA Ireland",
+    "synonym": []
+  },
+  {
+    "id": "501100011626",
+    "uri": "http://dx.doi.org/10.13039/501100011626",
+    "name": "Energy Policy Research Centre, Economic and Social Research Institute",
+    "synonym": []
+  },
+  {
+    "id": "501100014531",
+    "uri": "http://dx.doi.org/10.13039/501100014531",
+    "name": "Physical Education and Sport Sciences Department, University of Limerick",
+    "synonym": []
+  },
+  {
+    "id": "501100014745",
+    "uri": "http://dx.doi.org/10.13039/501100014745",
+    "name": "APC Microbiome Institute",
+    "synonym": []
+  },
+  {
+    "id": "501100014826",
+    "uri": "http://dx.doi.org/10.13039/501100014826",
+    "name": "ADAPT - Centre for Digital Content Technology",
+    "synonym": []
+  },
+  {
+    "id": "501100020570",
+    "uri": "http://dx.doi.org/10.13039/501100020570",
+    "name": "College of Medicine, Nursing and Health Sciences, National University of Ireland, Galway",
+    "synonym": []
+  },
+  {
+    "id": "501100020871",
+    "uri": "http://dx.doi.org/10.13039/501100020871",
+    "name": "Bernal Institute, University of Limerick",
+    "synonym": []
+  },
+  {
+    "id": "501100023852",
+    "uri": "http://dx.doi.org/10.13039/501100023852",
+    "name": "Moore Institute for Research in the Humanities and Social Studies, University of Galway",
+    "synonym": []
+  }
+]
@@ -16,6 +16,7 @@ import org.slf4j.{Logger, LoggerFactory}
 import java.util
 import scala.collection.JavaConverters._
 import scala.collection.mutable
+import scala.io.Source
 import scala.util.matching.Regex

 case class CrossrefDT(doi: String, json: String, timestamp: Long) {}
@@ -30,11 +31,22 @@ case class mappingAuthor(
   affiliation: Option[mappingAffiliation]
 ) {}

+case class funderInfo(id: String, uri: String, name: String, synonym: List[String]) {}
+
 case class mappingFunder(name: String, DOI: Option[String], award: Option[List[String]]) {}

 case object Crossref2Oaf {
   val logger: Logger = LoggerFactory.getLogger(Crossref2Oaf.getClass)

+  val irishFunder: List[funderInfo] = {
+    val s = Source.fromInputStream(getClass.getResourceAsStream("/eu/dnetlib/dhp/doiboost/crossref/irish_funder.json")).mkString
+    implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
+    lazy val json: org.json4s.JValue = parse(s)
+    json.extract[List[funderInfo]]
+  }
+
   val mappingCrossrefType = Map(
     "book-section" -> "publication",
     "book" -> "publication",
@ -88,6 +100,11 @@ case object Crossref2Oaf {
|
||||||
"report" -> "0017 Report"
|
"report" -> "0017 Report"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def getIrishId(doi:String):Option[String] = {
|
||||||
|
val id =doi.split("/").last
|
||||||
|
irishFunder.find(f => id.equalsIgnoreCase(f.id) || (f.synonym.nonEmpty && f.synonym.exists(s => s.equalsIgnoreCase(id)))).map(f => f.id)
|
||||||
|
}
|
||||||
|
|
||||||
def mappingResult(result: Result, json: JValue, cobjCategory: String): Result = {
|
def mappingResult(result: Result, json: JValue, cobjCategory: String): Result = {
|
||||||
implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
|
implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
|
||||||
|
|
||||||
|
@ -467,6 +484,14 @@ case object Crossref2Oaf {
|
||||||
if (funders != null)
|
if (funders != null)
|
||||||
funders.foreach(funder => {
|
funders.foreach(funder => {
|
||||||
if (funder.DOI.isDefined && funder.DOI.get.nonEmpty) {
|
if (funder.DOI.isDefined && funder.DOI.get.nonEmpty) {
|
||||||
|
|
||||||
|
if (getIrishId(funder.DOI.get).isDefined) {
|
||||||
|
val nsPrefix = getIrishId(funder.DOI.get).get.padTo(12, '_')
|
||||||
|
val targetId = getProjectId(nsPrefix, "1e5e62235d094afd01cd56e65112fc63")
|
||||||
|
queue += generateRelation(sourceId, targetId, ModelConstants.IS_PRODUCED_BY)
|
||||||
|
queue += generateRelation(targetId, sourceId, ModelConstants.PRODUCES)
|
||||||
|
}
|
||||||
|
|
||||||
funder.DOI.get match {
|
funder.DOI.get match {
|
||||||
case "10.13039/100010663" | "10.13039/100010661" | "10.13039/501100007601" | "10.13039/501100000780" |
|
case "10.13039/100010663" | "10.13039/100010661" | "10.13039/501100007601" | "10.13039/501100000780" |
|
||||||
"10.13039/100010665" =>
|
"10.13039/100010665" =>
|
||||||
|
|
|
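For context, the matching above keys on the last path segment of the funder DOI. Below is a minimal, self-contained Java sketch of that same lookup logic; the class and field names here are illustrative stand-ins, not the actual Crossref2Oaf API, and the funder list holds a single hard-coded entry for demonstration.

import java.util.Arrays;
import java.util.List;
import java.util.Optional;

// Illustrative stand-in for the funderInfo case class above.
class FunderInfo {
	final String id;
	final List<String> synonym;

	FunderInfo(String id, List<String> synonym) {
		this.id = id;
		this.synonym = synonym;
	}
}

public class IrishFunderLookup {

	// One sample entry; the real list is parsed from irish_funder.json.
	static final List<FunderInfo> IRISH_FUNDERS = Arrays
		.asList(new FunderInfo("100010547", Arrays.asList()));

	// Mirrors getIrishId: compare the DOI suffix, case-insensitively,
	// against the funder ids and their synonyms.
	static Optional<String> getIrishId(String doi) {
		String[] parts = doi.split("/");
		String id = parts[parts.length - 1];
		return IRISH_FUNDERS
			.stream()
			.filter(f -> id.equalsIgnoreCase(f.id) || f.synonym.stream().anyMatch(s -> s.equalsIgnoreCase(id)))
			.map(f -> f.id)
			.findFirst();
	}

	public static void main(String[] args) {
		// "10.13039/100010547" -> last segment "100010547" matches the funder id.
		System.out.println(getIrishId("10.13039/100010547")); // Optional[100010547]
		System.out.println(getIrishId("10.13039/999999999")); // Optional.empty
	}
}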
@@ -0,0 +1,110 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>eu.dnetlib.dhp</groupId>
        <artifactId>dhp-workflows</artifactId>
        <version>1.2.5-SNAPSHOT</version>
    </parent>
    <artifactId>dhp-swh</artifactId>

    <dependencies>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_${scala.binary.version}</artifactId>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_${scala.binary.version}</artifactId>
        </dependency>

        <dependency>
            <groupId>eu.dnetlib.dhp</groupId>
            <artifactId>dhp-common</artifactId>
            <version>${project.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>net.sf.saxon</groupId>
                    <artifactId>Saxon-HE</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>dom4j</groupId>
            <artifactId>dom4j</artifactId>
        </dependency>

        <dependency>
            <groupId>xml-apis</groupId>
            <artifactId>xml-apis</artifactId>
        </dependency>

        <dependency>
            <groupId>jaxen</groupId>
            <artifactId>jaxen</artifactId>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-distcp</artifactId>
        </dependency>

        <dependency>
            <groupId>eu.dnetlib</groupId>
            <artifactId>dnet-actionmanager-api</artifactId>
        </dependency>
        <dependency>
            <groupId>eu.dnetlib</groupId>
            <artifactId>dnet-actionmanager-common</artifactId>
            <exclusions>
                <exclusion>
                    <groupId>eu.dnetlib</groupId>
                    <artifactId>dnet-openaireplus-mapping-utils</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>saxonica</groupId>
                    <artifactId>saxon</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>saxonica</groupId>
                    <artifactId>saxon-dom</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>jgrapht</groupId>
                    <artifactId>jgrapht</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>net.sf.ehcache</groupId>
                    <artifactId>ehcache</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.springframework</groupId>
                    <artifactId>spring-test</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.*</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>apache</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>org.apache.httpcomponents</groupId>
            <artifactId>httpclient</artifactId>
            <version>4.5.13</version>
        </dependency>
        <dependency>
            <groupId>org.datanucleus</groupId>
            <artifactId>datanucleus-core</artifactId>
            <version>3.2.10</version>
            <scope>compile</scope>
        </dependency>

    </dependencies>
</project>
@@ -0,0 +1,176 @@

package eu.dnetlib.dhp.swh;

import static eu.dnetlib.dhp.utils.DHPUtils.getHadoopConfiguration;

import java.io.IOException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

import org.apache.commons.cli.ParseException;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.collection.CollectorException;
import eu.dnetlib.dhp.common.collection.HttpClientParams;
import eu.dnetlib.dhp.schema.common.ModelSupport;
import eu.dnetlib.dhp.schema.oaf.utils.GraphCleaningFunctions;
import eu.dnetlib.dhp.swh.models.LastVisitData;
import eu.dnetlib.dhp.swh.utils.SWHConnection;
import eu.dnetlib.dhp.swh.utils.SWHConstants;
import eu.dnetlib.dhp.swh.utils.SWHUtils;

/**
 * Sends archive requests to the SWH API for the software repository URLs that are missing from the archive.
 *
 * @author Serafeim Chatzopoulos
 */
public class ArchiveRepositoryURLs {

	private static final Logger log = LoggerFactory.getLogger(ArchiveRepositoryURLs.class);
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static SWHConnection swhConnection = null;

	public static void main(final String[] args) throws IOException, ParseException {
		final ArgumentApplicationParser argumentParser = new ArgumentApplicationParser(
			IOUtils
				.toString(
					CollectLastVisitRepositoryData.class
						.getResourceAsStream(
							"/eu/dnetlib/dhp/swh/input_archive_repository_urls.json")));
		argumentParser.parseArgument(args);

		final String hdfsuri = argumentParser.get("namenode");
		log.info("hdfsURI: {}", hdfsuri);

		final String inputPath = argumentParser.get("lastVisitsPath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = argumentParser.get("archiveRequestsPath");
		log.info("outputPath: {}", outputPath);

		final Integer archiveThresholdInDays = Integer.parseInt(argumentParser.get("archiveThresholdInDays"));
		log.info("archiveThresholdInDays: {}", archiveThresholdInDays);

		final String apiAccessToken = argumentParser.get("apiAccessToken");
		log.info("apiAccessToken: {}", apiAccessToken);

		final HttpClientParams clientParams = SWHUtils.getClientParams(argumentParser);

		swhConnection = new SWHConnection(clientParams, apiAccessToken);

		final FileSystem fs = FileSystem.get(getHadoopConfiguration(hdfsuri));

		archive(fs, inputPath, outputPath, archiveThresholdInDays);

	}

	private static void archive(FileSystem fs, String inputPath, String outputPath, Integer archiveThresholdInDays)
		throws IOException {

		SequenceFile.Reader fr = SWHUtils.getSequenceFileReader(fs, inputPath);
		SequenceFile.Writer fw = SWHUtils.getSequenceFileWriter(fs, outputPath);

		// Create key and value objects to hold data
		Text repoUrl = new Text();
		Text lastVisitData = new Text();

		// Read key-value pairs from the SequenceFile and handle them appropriately
		while (fr.next(repoUrl, lastVisitData)) {

			String response = null;
			try {
				response = handleRecord(repoUrl.toString(), lastVisitData.toString(), archiveThresholdInDays);
			} catch (java.text.ParseException e) {
				log.error("Could not handle record with repo URL: {}", repoUrl.toString());
				throw new RuntimeException(e);
			}

			// a null response means that no request was needed
			if (response != null) {
				SWHUtils.appendToSequenceFile(fw, repoUrl.toString(), response);
			}

		}

		// Close the reader and the writer
		fw.close();
		fr.close();
	}

	public static String handleRecord(String repoUrl, String lastVisitData, Integer archiveThresholdInDays)
		throws IOException, java.text.ParseException {

		log.info("{ Key: {}, Value: {} }", repoUrl, lastVisitData);

		LastVisitData lastVisit = OBJECT_MAPPER.readValue(lastVisitData, LastVisitData.class);

		// a previous archival attempt was made and the repository URL was not found;
		// avoid performing the same archive request again
		if (lastVisit.getStatus() != null &&
			lastVisit.getStatus().equals(SWHConstants.VISIT_STATUS_NOT_FOUND)) {

			log.info("Avoid request -- previous archive request returned NOT_FOUND");
			return null;
		}

		// if we have last visit data
		if (lastVisit.getSnapshot() != null) {

			String cleanDate = GraphCleaningFunctions.cleanDate(lastVisit.getDate());

			// and the last visit date can be parsed
			if (cleanDate != null) {

				SimpleDateFormat formatter = new SimpleDateFormat(ModelSupport.DATE_FORMAT);
				Date lastVisitDate = formatter.parse(cleanDate);

				// compute how many days have passed since the last visit
				long diffInMillies = Math.abs((new Date()).getTime() - lastVisitDate.getTime());
				long diffInDays = TimeUnit.DAYS.convert(diffInMillies, TimeUnit.MILLISECONDS);
				log.info("Date diff from now (in days): {}", diffInDays);

				// do not perform a request if the last visit is no older than $archiveThresholdInDays
				if (archiveThresholdInDays >= diffInDays) {
					log.info("Avoid request -- no older than {} days", archiveThresholdInDays);
					return null;
				}
			}
		}

		// otherwise, perform an archive request
		log.info("Perform archive request for: {}", repoUrl);

		// if last visit data are available, re-use the version control type,
		// else use the default one (i.e., git)
		String visitType = Optional
			.ofNullable(lastVisit.getType())
			.orElse(SWHConstants.DEFAULT_VISIT_TYPE);

		URL url = new URL(String.format(SWHConstants.SWH_ARCHIVE_URL, visitType, repoUrl.trim()));

		log.info("Sending archive request: {}", url);

		String response;
		try {
			response = swhConnection.call(url.toString());
		} catch (CollectorException e) {
			log.error("Error in request: {}", url);
			response = "{}";
		}

		return response;
	}

}
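The core of handleRecord is the age check on the last visit: only repositories whose latest visit is older than archiveThresholdInDays trigger a new archive request. A standalone sketch of just that date arithmetic follows; the date pattern here is an assumed ISO-like placeholder, while the job itself uses ModelSupport.DATE_FORMAT.

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.concurrent.TimeUnit;

public class VisitAgeCheck {

	// Assumption: an ISO-like timestamp pattern stands in for ModelSupport.DATE_FORMAT.
	static final SimpleDateFormat FORMATTER = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");

	// Returns true when the last visit is recent enough to skip a new archive request.
	static boolean skipArchiveRequest(String lastVisitDateStr, int archiveThresholdInDays) throws ParseException {
		Date lastVisitDate = FORMATTER.parse(lastVisitDateStr);
		long diffInMillies = Math.abs(new Date().getTime() - lastVisitDate.getTime());
		long diffInDays = TimeUnit.DAYS.convert(diffInMillies, TimeUnit.MILLISECONDS);
		return archiveThresholdInDays >= diffInDays;
	}

	public static void main(String[] args) throws ParseException {
		// A visit from 2020 is well past a 365-day threshold, so a request would be sent.
		System.out.println(skipArchiveRequest("2020-01-01T00:00:00", 365)); // false (do not skip)
	}
}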
@@ -0,0 +1,119 @@

package eu.dnetlib.dhp.swh;

import static eu.dnetlib.dhp.utils.DHPUtils.getHadoopConfiguration;

import java.io.BufferedReader;
import java.io.IOException;
import java.net.URL;

import org.apache.commons.cli.ParseException;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.collection.CollectorException;
import eu.dnetlib.dhp.common.collection.HttpClientParams;
import eu.dnetlib.dhp.swh.utils.SWHConnection;
import eu.dnetlib.dhp.swh.utils.SWHConstants;
import eu.dnetlib.dhp.swh.utils.SWHUtils;

/**
 * Given a file with software repository URLs, this class
 * collects last visit data from the Software Heritage API.
 *
 * @author Serafeim Chatzopoulos
 */
public class CollectLastVisitRepositoryData {

	private static final Logger log = LoggerFactory.getLogger(CollectLastVisitRepositoryData.class);
	private static SWHConnection swhConnection = null;

	public static void main(final String[] args)
		throws IOException, ParseException {
		final ArgumentApplicationParser argumentParser = new ArgumentApplicationParser(
			IOUtils
				.toString(
					CollectLastVisitRepositoryData.class
						.getResourceAsStream(
							"/eu/dnetlib/dhp/swh/input_collect_last_visit_repository_data.json")));
		argumentParser.parseArgument(args);

		log.info("Java Xmx: {}m", Runtime.getRuntime().maxMemory() / (1024 * 1024));

		final String hdfsuri = argumentParser.get("namenode");
		log.info("hdfsURI: {}", hdfsuri);

		final String inputPath = argumentParser.get("softwareCodeRepositoryURLs");
		log.info("inputPath: {}", inputPath);

		final String outputPath = argumentParser.get("lastVisitsPath");
		log.info("outputPath: {}", outputPath);

		final String apiAccessToken = argumentParser.get("apiAccessToken");
		log.info("apiAccessToken: {}", apiAccessToken);

		final HttpClientParams clientParams = SWHUtils.getClientParams(argumentParser);

		swhConnection = new SWHConnection(clientParams, apiAccessToken);

		final FileSystem fs = FileSystem.get(getHadoopConfiguration(hdfsuri));

		collect(fs, inputPath, outputPath);

		fs.close();
	}

	private static void collect(FileSystem fs, String inputPath, String outputPath)
		throws IOException {

		SequenceFile.Writer fw = SWHUtils.getSequenceFileWriter(fs, outputPath);

		// Specify the HDFS directory path you want to read
		Path directoryPath = new Path(inputPath);

		// List all files in the directory
		FileStatus[] partStatuses = fs.listStatus(directoryPath);

		for (FileStatus partStatus : partStatuses) {

			// Check if it's a file (not a directory)
			if (partStatus.isFile()) {
				handleFile(fs, partStatus.getPath(), fw);
			}

		}

		fw.close();
	}

	private static void handleFile(FileSystem fs, Path partInputPath, SequenceFile.Writer fw)
		throws IOException {

		BufferedReader br = SWHUtils.getFileReader(fs, partInputPath);

		String repoUrl;
		while ((repoUrl = br.readLine()) != null) {

			URL url = new URL(String.format(SWHConstants.SWH_LATEST_VISIT_URL, repoUrl.trim()));

			String response;
			try {
				response = swhConnection.call(url.toString());
			} catch (CollectorException e) {
				log.error("Error in request: {}", url);
				response = "{}";
			}

			SWHUtils.appendToSequenceFile(fw, repoUrl, response);
		}

		br.close();
	}

}
@@ -0,0 +1,93 @@

package eu.dnetlib.dhp.swh;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkHiveSession;

import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.oaf.Result;

/**
 * Collects unique software repository URLs in the Graph using Hive
 *
 * @author Serafeim Chatzopoulos
 */
public class CollectSoftwareRepositoryURLs {

	private static final Logger log = LoggerFactory.getLogger(CollectSoftwareRepositoryURLs.class);

	public static <I extends Result> void main(String[] args) throws Exception {

		String jsonConfiguration = IOUtils
			.toString(
				CollectSoftwareRepositoryURLs.class
					.getResourceAsStream("/eu/dnetlib/dhp/swh/input_collect_software_repository_urls.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		final Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String hiveDbName = parser.get("hiveDbName");
		log.info("hiveDbName: {}", hiveDbName);

		final String outputPath = parser.get("softwareCodeRepositoryURLs");
		log.info("softwareCodeRepositoryURLs: {}", outputPath);

		final String hiveMetastoreUris = parser.get("hiveMetastoreUris");
		log.info("hiveMetastoreUris: {}", hiveMetastoreUris);

		final Integer softwareLimit = Integer.parseInt(parser.get("softwareLimit"));
		log.info("softwareLimit: {}", softwareLimit);

		SparkConf conf = new SparkConf();
		conf.set("hive.metastore.uris", hiveMetastoreUris);

		runWithSparkHiveSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				doRun(spark, hiveDbName, softwareLimit, outputPath);
			});
	}

	private static <I extends Result> void doRun(SparkSession spark, String hiveDbName, Integer limit,
		String outputPath) {

		String queryTemplate = "SELECT distinct coderepositoryurl.value " +
			"FROM %s.software " +
			"WHERE coderepositoryurl.value IS NOT NULL " +
			"AND datainfo.deletedbyinference = FALSE " +
			"AND datainfo.invisible = FALSE ";

		if (limit != null) {
			queryTemplate += String.format("LIMIT %s", limit);
		}

		String query = String.format(queryTemplate, hiveDbName);

		log.info("Hive query to fetch software code URLs: {}", query);

		Dataset<Row> df = spark.sql(query);

		// write distinct repository URLs
		df
			.write()
			.mode(SaveMode.Overwrite)
			.csv(outputPath);
	}

}
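The query assembled in doRun is plain string templating. The sketch below renders the same template for a hypothetical database name and limit, just to show the final SQL; "openaire_prod" and the limit of 100 are made-up values standing in for the hiveDbName and softwareLimit arguments.

public class QueryTemplateDemo {
	public static void main(String[] args) {
		String queryTemplate = "SELECT distinct coderepositoryurl.value " +
			"FROM %s.software " +
			"WHERE coderepositoryurl.value IS NOT NULL " +
			"AND datainfo.deletedbyinference = FALSE " +
			"AND datainfo.invisible = FALSE ";

		Integer limit = 100; // illustrative; the job reads it from the softwareLimit argument
		if (limit != null) {
			queryTemplate += String.format("LIMIT %s", limit);
		}

		// Database name is illustrative; the job reads it from hiveDbName.
		System.out.println(String.format(queryTemplate, "openaire_prod"));
	}
}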
@@ -0,0 +1,185 @@

package eu.dnetlib.dhp.swh;

import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
import static org.apache.spark.sql.functions.col;

import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.schema.action.AtomicAction;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
import eu.dnetlib.dhp.swh.models.LastVisitData;
import eu.dnetlib.dhp.swh.utils.SWHConstants;
import scala.Tuple2;

/**
 * Creates action sets for Software Heritage data
 *
 * @author Serafeim Chatzopoulos
 */
public class PrepareSWHActionsets {

	private static final Logger log = LoggerFactory.getLogger(PrepareSWHActionsets.class);
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	public static <I extends Result> void main(String[] args) throws Exception {

		String jsonConfiguration = IOUtils
			.toString(
				PrepareSWHActionsets.class
					.getResourceAsStream(
						"/eu/dnetlib/dhp/swh/input_prepare_swh_actionsets.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
		parser.parseArgument(args);

		final Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);
		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("lastVisitsPath");
		log.info("inputPath: {}", inputPath);

		final String softwareInputPath = parser.get("softwareInputPath");
		log.info("softwareInputPath: {}", softwareInputPath);

		final String outputPath = parser.get("actionsetsPath");
		log.info("outputPath: {}", outputPath);

		SparkConf conf = new SparkConf();

		runWithSparkSession(
			conf,
			isSparkSessionManaged,
			spark -> {
				JavaPairRDD<Text, Text> softwareRDD = prepareActionsets(spark, inputPath, softwareInputPath);
				softwareRDD
					.saveAsHadoopFile(
						outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, GzipCodec.class);
			});
	}

	private static Dataset<Row> loadSWHData(SparkSession spark, String inputPath) {

		JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		// read from file and transform to <origin, snapshotId> tuples
		// Note: snapshot id is the SWH id for us
		JavaRDD<Row> swhRDD = sc
			.sequenceFile(inputPath, Text.class, Text.class)
			.map(t -> t._2().toString())
			.map(t -> OBJECT_MAPPER.readValue(t, LastVisitData.class))
			.filter(t -> t.getOrigin() != null && t.getSnapshot() != null) // response from SWH API is empty if the repo URL was not found
			.map(item -> RowFactory.create(item.getOrigin(), item.getSnapshot()));

		// convert RDD to 2-column DF
		List<StructField> fields = Arrays
			.asList(
				DataTypes.createStructField("repoUrl", DataTypes.StringType, true),
				DataTypes.createStructField("swhId", DataTypes.StringType, true));
		StructType schema = DataTypes.createStructType(fields);

		return spark.createDataFrame(swhRDD, schema);
	}

	private static Dataset<Row> loadGraphSoftwareData(SparkSession spark, String softwareInputPath) {
		return spark
			.read()
			.textFile(softwareInputPath)
			.map(
				(MapFunction<String, Software>) t -> OBJECT_MAPPER.readValue(t, Software.class),
				Encoders.bean(Software.class))
			.filter(t -> t.getCodeRepositoryUrl() != null)
			.select(col("id"), col("codeRepositoryUrl.value").as("repoUrl"));
	}

	private static <I extends Software> JavaPairRDD<Text, Text> prepareActionsets(SparkSession spark, String inputPath,
		String softwareInputPath) {

		Dataset<Row> swhDF = loadSWHData(spark, inputPath);
		// swhDF.show(false);

		Dataset<Row> graphSoftwareDF = loadGraphSoftwareData(spark, softwareInputPath);
		// graphSoftwareDF.show(5);

		Dataset<Row> joinedDF = graphSoftwareDF.join(swhDF, "repoUrl").select("id", "swhid");
		// joinedDF.show(false);

		return joinedDF.map((MapFunction<Row, Software>) row -> {

			Software s = new Software();

			// set openaire id
			s.setId(row.getString(row.fieldIndex("id")));

			// set swh id
			Qualifier qualifier = OafMapperUtils
				.qualifier(
					SWHConstants.SWHID,
					SWHConstants.SWHID_CLASSNAME,
					ModelConstants.DNET_PID_TYPES,
					ModelConstants.DNET_PID_TYPES);

			DataInfo dataInfo = OafMapperUtils
				.dataInfo(
					false,
					null,
					false,
					false,
					ModelConstants.PROVENANCE_ACTION_SET_QUALIFIER,
					"");

			s
				.setPid(
					Arrays
						.asList(
							OafMapperUtils
								.structuredProperty(
									String.format("swh:1:snp:%s", row.getString(row.fieldIndex("swhid"))),
									qualifier,
									dataInfo)));

			// add SWH in the `collectedFrom` field
			KeyValue kv = new KeyValue();
			kv.setKey(SWHConstants.SWH_ID);
			kv.setValue(SWHConstants.SWH_NAME);

			s.setCollectedfrom(Arrays.asList(kv));

			return s;
		}, Encoders.bean(Software.class))
			.toJavaRDD()
			.map(p -> new AtomicAction(Software.class, p))
			.mapToPair(
				aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
					new Text(OBJECT_MAPPER.writeValueAsString(aa))));
	}
}
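The pid written for each matched software record is a SWHID built from the snapshot hash. A one-liner sketch of that formatting; the hash below is a made-up example, real values come from the SWH last-visit responses.

public class SwhidFormatDemo {
	public static void main(String[] args) {
		// Made-up snapshot hash for illustration.
		String snapshotId = "c07b9ea38d1f9e4b05c4ac5020e8b3ba91a83bc1";
		// "snp" marks a snapshot object in SWHID notation.
		System.out.println(String.format("swh:1:snp:%s", snapshotId));
		// -> swh:1:snp:c07b9ea38d1f9e4b05c4ac5020e8b3ba91a83bc1
	}
}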
@@ -0,0 +1,71 @@

package eu.dnetlib.dhp.swh.models;

import java.io.Serializable;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

@JsonIgnoreProperties(ignoreUnknown = true)
public class LastVisitData implements Serializable {

	private String origin;
	private String type;
	private String date;

	@JsonProperty("snapshot")
	private String snapshotId;

	private String status;

	public String getType() {
		return type;
	}

	public void setType(String type) {
		this.type = type;
	}

	public String getDate() {
		return date;
	}

	public void setDate(String date) {
		this.date = date;
	}

	public String getSnapshot() {
		return snapshotId;
	}

	public void setSnapshot(String snapshotId) {
		this.snapshotId = snapshotId;
	}

	public String getStatus() {
		return status;
	}

	public void setStatus(String status) {
		this.status = status;
	}

	public String getOrigin() {
		return origin;
	}

	public void setOrigin(String origin) {
		this.origin = origin;
	}

	@Override
	public String toString() {
		return "LastVisitData{" +
			"origin='" + origin + '\'' +
			", type='" + type + '\'' +
			", date='" + date + '\'' +
			", snapshotId='" + snapshotId + '\'' +
			", status='" + status + '\'' +
			'}';
	}
}
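A quick sanity check of how a last-visit payload maps onto this bean; the JSON below is a hand-made sample shaped like an SWH latest-visit response, not a captured API reply.

import com.fasterxml.jackson.databind.ObjectMapper;

public class LastVisitDataDemo {
	public static void main(String[] args) throws Exception {
		// Hand-made sample; the "snapshot" field maps onto snapshotId via @JsonProperty.
		String json = "{\"origin\": \"https://github.com/example/repo\","
			+ " \"type\": \"git\","
			+ " \"date\": \"2021-09-13T21:59:27+00:00\","
			+ " \"snapshot\": \"c07b9ea38d1f9e4b05c4ac5020e8b3ba91a83bc1\","
			+ " \"status\": \"full\","
			+ " \"metadata\": {}}"; // unknown fields are dropped via @JsonIgnoreProperties

		LastVisitData lastVisit = new ObjectMapper().readValue(json, LastVisitData.class);
		System.out.println(lastVisit); // toString prints origin, type, date, snapshotId, status
	}
}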
@@ -0,0 +1,40 @@

package eu.dnetlib.dhp.swh.utils;

import java.util.HashMap;
import java.util.Map;

import org.apache.http.HttpHeaders;

import eu.dnetlib.dhp.common.collection.CollectorException;
import eu.dnetlib.dhp.common.collection.HttpClientParams;
import eu.dnetlib.dhp.common.collection.HttpConnector2;

public class SWHConnection {

	HttpConnector2 conn;

	public SWHConnection(HttpClientParams clientParams, String accessToken) {

		// set custom headers
		Map<String, String> headers = new HashMap<String, String>() {
			{
				put(HttpHeaders.ACCEPT, "application/json");
				if (accessToken != null) {
					put(HttpHeaders.AUTHORIZATION, String.format("Bearer %s", accessToken));
				}
			}
		};

		clientParams.setHeaders(headers);

		// create http connector
		conn = new HttpConnector2(clientParams);

	}

	public String call(String url) throws CollectorException {
		return conn.getInputSource(url);
	}

}
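Putting SWHConnection together with the client parameters; a minimal usage sketch, assuming the access token is supplied via an environment variable (the jobs above instead take it as a command-line argument).

import eu.dnetlib.dhp.common.collection.CollectorException;
import eu.dnetlib.dhp.common.collection.HttpClientParams;

public class SWHConnectionDemo {
	public static void main(String[] args) throws CollectorException {
		// Default client parameters; the workflow overrides these from job arguments.
		HttpClientParams params = new HttpClientParams();

		// The token is optional: without it only the Accept header is sent.
		String token = System.getenv("SWH_API_TOKEN"); // assumption: token provided via env
		SWHConnection conn = new SWHConnection(params, token);

		// The origin URL is embedded as-is in the endpoint path, as in the jobs above.
		String response = conn
			.call("https://archive.softwareheritage.org/api/1/origin/https://github.com/example/repo/visit/latest/");
		System.out.println(response);
	}
}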
@@ -0,0 +1,21 @@

package eu.dnetlib.dhp.swh.utils;

public class SWHConstants {
	public static final String SWH_LATEST_VISIT_URL = "https://archive.softwareheritage.org/api/1/origin/%s/visit/latest/";

	public static final String SWH_ARCHIVE_URL = "https://archive.softwareheritage.org/api/1/origin/save/%s/url/%s/";

	public static final String DEFAULT_VISIT_TYPE = "git";

	public static final String VISIT_STATUS_NOT_FOUND = "not_found";

	public static final String SWHID = "swhid";

	public static final String SWHID_CLASSNAME = "Software Heritage Identifier";

	public static final String SWH_ID = "10|openaire____::dbfd07503aaa1ed31beed7dec942f3f4";

	public static final String SWH_NAME = "Software Heritage";

}
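The two URL templates are filled via String.format, with the origin URL dropped straight into the path. A small sketch; the repository URL is a made-up example.

public class SwhUrlDemo {
	public static void main(String[] args) {
		String visitType = "git"; // SWHConstants.DEFAULT_VISIT_TYPE
		String repoUrl = "https://github.com/example/repo"; // made-up example

		// %s placeholders are substituted in order: origin for the latest-visit
		// endpoint; visit type then origin for the save endpoint.
		String latestVisitUrl = String
			.format("https://archive.softwareheritage.org/api/1/origin/%s/visit/latest/", repoUrl);
		String archiveUrl = String
			.format("https://archive.softwareheritage.org/api/1/origin/save/%s/url/%s/", visitType, repoUrl);

		System.out.println(latestVisitUrl);
		System.out.println(archiveUrl);
	}
}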
@@ -0,0 +1,95 @@

package eu.dnetlib.dhp.swh.utils;

import static eu.dnetlib.dhp.common.Constants.*;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Optional;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.collection.HttpClientParams;

public class SWHUtils {

	private static final Logger log = LoggerFactory.getLogger(SWHUtils.class);

	public static HttpClientParams getClientParams(ArgumentApplicationParser argumentParser) {

		final HttpClientParams clientParams = new HttpClientParams();
		clientParams
			.setMaxNumberOfRetry(
				Optional
					.ofNullable(argumentParser.get(MAX_NUMBER_OF_RETRY))
					.map(Integer::parseInt)
					.orElse(HttpClientParams._maxNumberOfRetry));
		log.info("maxNumberOfRetry is {}", clientParams.getMaxNumberOfRetry());

		clientParams
			.setRequestDelay(
				Optional
					.ofNullable(argumentParser.get(REQUEST_DELAY))
					.map(Integer::parseInt)
					.orElse(HttpClientParams._requestDelay));
		log.info("requestDelay is {}", clientParams.getRequestDelay());

		clientParams
			.setRetryDelay(
				Optional
					.ofNullable(argumentParser.get(RETRY_DELAY))
					.map(Integer::parseInt)
					.orElse(HttpClientParams._retryDelay));
		log.info("retryDelay is {}", clientParams.getRetryDelay());

		clientParams
			.setRequestMethod(
				Optional
					.ofNullable(argumentParser.get(REQUEST_METHOD))
					.orElse(HttpClientParams._requestMethod));
		log.info("requestMethod is {}", clientParams.getRequestMethod());

		return clientParams;
	}

	public static BufferedReader getFileReader(FileSystem fs, Path inputPath) throws IOException {
		FSDataInputStream inputStream = fs.open(inputPath);
		return new BufferedReader(
			new InputStreamReader(inputStream, StandardCharsets.UTF_8));
	}

	public static SequenceFile.Writer getSequenceFileWriter(FileSystem fs, String outputPath) throws IOException {
		return SequenceFile
			.createWriter(
				fs.getConf(),
				SequenceFile.Writer.file(new Path(outputPath)),
				SequenceFile.Writer.keyClass(Text.class),
				SequenceFile.Writer.valueClass(Text.class));
	}

	public static SequenceFile.Reader getSequenceFileReader(FileSystem fs, String inputPath) throws IOException {
		Path filePath = new Path(inputPath);
		SequenceFile.Reader.Option fileOption = SequenceFile.Reader.file(filePath);

		return new SequenceFile.Reader(fs.getConf(), fileOption);
	}

	public static void appendToSequenceFile(SequenceFile.Writer fw, String keyStr, String valueStr)
		throws IOException {
		Text key = new Text();
		key.set(keyStr);

		Text value = new Text();
		value.set(valueStr);

		fw.append(key, value);
	}

}
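The SequenceFile helpers pair naturally: one job writes <repoUrl, response> records and the next reads them back. A local round-trip sketch using the same helpers, assuming the local filesystem stands in for HDFS and the path is illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class SequenceFileRoundTrip {
	public static void main(String[] args) throws Exception {
		// Local filesystem stands in for HDFS; the jobs pass a namenode URI instead.
		FileSystem fs = FileSystem.getLocal(new Configuration());
		String path = "/tmp/last_visits_demo.seq"; // illustrative path

		// Write one <repoUrl, response> record, as CollectLastVisitRepositoryData does.
		SequenceFile.Writer fw = SWHUtils.getSequenceFileWriter(fs, path);
		SWHUtils.appendToSequenceFile(fw, "https://github.com/example/repo", "{\"status\":\"full\"}");
		fw.close();

		// Read the records back, as ArchiveRepositoryURLs does.
		SequenceFile.Reader fr = SWHUtils.getSequenceFileReader(fs, path);
		Text key = new Text();
		Text value = new Text();
		while (fr.next(key, value)) {
			System.out.println(key + " -> " + value);
		}
		fr.close();
	}
}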
@@ -0,0 +1,56 @@
[
  {
    "paramName": "n",
    "paramLongName": "namenode",
    "paramDescription": "the Name Node URI",
    "paramRequired": true
  },
  {
    "paramName": "lv",
    "paramLongName": "lastVisitsPath",
    "paramDescription": "the URL where to store last visits data",
    "paramRequired": true
  },
  {
    "paramName": "arp",
    "paramLongName": "archiveRequestsPath",
    "paramDescription": "the URL where to store the responses of the archive requests",
    "paramRequired": true
  },
  {
    "paramName": "mnr",
    "paramLongName": "maxNumberOfRetry",
    "paramDescription": "the maximum number of admitted connection retries",
    "paramRequired": false
  },
  {
    "paramName": "rqd",
    "paramLongName": "requestDelay",
    "paramDescription": "the delay (ms) between requests",
    "paramRequired": false
  },
  {
    "paramName": "rtd",
    "paramLongName": "retryDelay",
    "paramDescription": "the delay (ms) between retries",
    "paramRequired": false
  },
  {
    "paramName": "rm",
    "paramLongName": "requestMethod",
    "paramDescription": "the method of the requests to perform",
    "paramRequired": false
  },
  {
    "paramName": "atid",
    "paramLongName": "archiveThresholdInDays",
    "paramDescription": "the threshold (in days) required to issue an archive request",
    "paramRequired": false
  },
  {
    "paramName": "aat",
    "paramLongName": "apiAccessToken",
    "paramDescription": "the API access token of the SWH API",
    "paramRequired": false
  }
]
@@ -0,0 +1,50 @@
[
  {
    "paramName": "n",
    "paramLongName": "namenode",
    "paramDescription": "the Name Node URI",
    "paramRequired": true
  },
  {
    "paramName": "scr",
    "paramLongName": "softwareCodeRepositoryURLs",
    "paramDescription": "the URL from where to read software repository URLs",
    "paramRequired": true
  },
  {
    "paramName": "lv",
    "paramLongName": "lastVisitsPath",
    "paramDescription": "the URL where to store last visits data",
    "paramRequired": true
  },
  {
    "paramName": "mnr",
    "paramLongName": "maxNumberOfRetry",
    "paramDescription": "the maximum number of admitted connection retries",
    "paramRequired": false
  },
  {
    "paramName": "rqd",
    "paramLongName": "requestDelay",
    "paramDescription": "the delay (ms) between requests",
    "paramRequired": false
  },
  {
    "paramName": "rtd",
    "paramLongName": "retryDelay",
    "paramDescription": "the delay (ms) between retries",
    "paramRequired": false
  },
  {
    "paramName": "rm",
    "paramLongName": "requestMethod",
    "paramDescription": "the method of the requests to perform",
    "paramRequired": false
  },
  {
    "paramName": "aat",
    "paramLongName": "apiAccessToken",
    "paramDescription": "the API access token of the SWH API",
    "paramRequired": false
  }
]
@@ -0,0 +1,32 @@
[
  {
    "paramName": "issm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "when true will stop SparkSession after job execution",
    "paramRequired": false
  },
  {
    "paramName": "scr",
    "paramLongName": "softwareCodeRepositoryURLs",
    "paramDescription": "the URL where to store software repository URLs",
    "paramRequired": true
  },
  {
    "paramName": "db",
    "paramLongName": "hiveDbName",
    "paramDescription": "the target hive database name",
    "paramRequired": true
  },
  {
    "paramName": "hmu",
    "paramLongName": "hiveMetastoreUris",
    "paramDescription": "the hive metastore uris",
    "paramRequired": true
  },
  {
    "paramName": "slim",
    "paramLongName": "softwareLimit",
    "paramDescription": "limit on the number of software repo URLs to fetch",
    "paramRequired": false
  }
]
@@ -0,0 +1,26 @@
[
  {
    "paramName": "issm",
    "paramLongName": "isSparkSessionManaged",
    "paramDescription": "when true will stop SparkSession after job execution",
    "paramRequired": false
  },
  {
    "paramName": "lv",
    "paramLongName": "lastVisitsPath",
    "paramDescription": "the URL where to store last visits data",
    "paramRequired": true
  },
  {
    "paramName": "ap",
    "paramLongName": "actionsetsPath",
    "paramDescription": "the URL path where to store actionsets",
    "paramRequired": true
  },
  {
    "paramName": "sip",
    "paramLongName": "softwareInputPath",
    "paramDescription": "the URL path of the software in the graph",
    "paramRequired": true
  }
]
@@ -0,0 +1,19 @@
# hive
hiveDbName=openaire_prod_20230914

# input/output files
softwareCodeRepositoryURLs=${workingDir}/1_code_repo_urls.csv
lastVisitsPath=${workingDir}/2_last_visits.seq
archiveRequestsPath=${workingDir}/3_archive_requests.seq
actionsetsPath=${workingDir}/4_actionsets
graphPath=/tmp/prod_provision/graph/18_graph_blacklisted

apiAccessToken=eyJhbGciOiJIUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJhMTMxYTQ1My1hM2IyLTQwMTUtODQ2Ny05MzAyZjk3MTFkOGEifQ.eyJpYXQiOjE2OTQ2MzYwMjAsImp0aSI6IjkwZjdkNTNjLTQ5YTktNGFiMy1hY2E0LTcwMTViMjEyZTNjNiIsImlzcyI6Imh0dHBzOi8vYXV0aC5zb2Z0d2FyZWhlcml0YWdlLm9yZy9hdXRoL3JlYWxtcy9Tb2Z0d2FyZUhlcml0YWdlIiwiYXVkIjoiaHR0cHM6Ly9hdXRoLnNvZnR3YXJlaGVyaXRhZ2Uub3JnL2F1dGgvcmVhbG1zL1NvZnR3YXJlSGVyaXRhZ2UiLCJzdWIiOiIzMTY5OWZkNC0xNmE0LTQxOWItYTdhMi00NjI5MDY4ZjI3OWEiLCJ0eXAiOiJPZmZsaW5lIiwiYXpwIjoic3doLXdlYiIsInNlc3Npb25fc3RhdGUiOiIzMjYzMzEwMS00ZDRkLTQwMjItODU2NC1iMzNlMTJiNTE3ZDkiLCJzY29wZSI6Im9wZW5pZCBvZmZsaW5lX2FjY2VzcyBwcm9maWxlIGVtYWlsIn0.XHj1VIZu1dZ4Ej32-oU84mFmaox9cLNjXosNxwZM0Xs

maxNumberOfRetry=2
retryDelay=1
requestDelay=100

softwareLimit=500

resumeFrom=collect-software-repository-urls
@@ -0,0 +1,54 @@
<configuration>
    <property>
        <name>jobTracker</name>
        <value>yarnRM</value>
    </property>
    <property>
        <name>nameNode</name>
        <value>hdfs://nameservice1</value>
    </property>
    <property>
        <name>oozie.use.system.libpath</name>
        <value>true</value>
    </property>
    <property>
        <name>oozie.action.sharelib.for.spark</name>
        <value>spark2</value>
    </property>
    <property>
        <name>hiveMetastoreUris</name>
        <value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
    </property>
    <property>
        <name>spark2YarnHistoryServerAddress</name>
        <value>http://iis-cdh5-test-gw.ocean.icm.edu.pl:18089</value>
    </property>
    <property>
        <name>spark2EventLogDir</name>
        <value>/user/spark/spark2ApplicationHistory</value>
    </property>
    <property>
        <name>spark2ExtraListeners</name>
        <value>"com.cloudera.spark.lineage.NavigatorAppListener"</value>
    </property>
    <property>
        <name>spark2SqlQueryExecutionListeners</name>
        <value>"com.cloudera.spark.lineage.NavigatorQueryListener"</value>
    </property>
    <property>
        <name>oozieActionShareLibForSpark2</name>
        <value>spark2</value>
    </property>
    <property>
        <name>resourceManager</name>
        <value>http://iis-cdh5-test-m2.ocean.icm.edu.pl:8088/cluster</value>
    </property>
    <property>
        <name>oozie.launcher.mapreduce.user.classpath.first</name>
        <value>true</value>
    </property>
    <property>
        <name>sparkSqlWarehouseDir</name>
        <value>/user/hive/warehouse</value>
    </property>
</configuration>
@@ -0,0 +1,183 @@
<workflow-app name="Software-Heritage-Integration-Workflow" xmlns="uri:oozie:workflow:0.5">

    <!-- Custom parameters -->
    <parameters>
        <property>
            <name>hiveDbName</name>
            <description>The name of the Hive DB to be used</description>
        </property>
        <property>
            <name>softwareCodeRepositoryURLs</name>
            <description>The path in the HDFS to save the software repository URLs</description>
        </property>
        <property>
            <name>lastVisitsPath</name>
            <description>The path in the HDFS to save the responses of the last visit requests</description>
        </property>
        <property>
            <name>archiveRequestsPath</name>
            <description>The path in the HDFS to save the responses of the archive requests</description>
        </property>
        <property>
            <name>actionsetsPath</name>
            <description>The path in the HDFS to save the action sets</description>
        </property>
        <property>
            <name>graphPath</name>
            <description>The path in the HDFS to the base folder of the graph</description>
        </property>
        <property>
            <name>maxNumberOfRetry</name>
            <description>Max number of retries for failed API calls</description>
        </property>
        <property>
            <name>retryDelay</name>
            <description>Retry delay for failed requests (in sec)</description>
        </property>
        <property>
            <name>requestDelay</name>
            <description>Delay between API requests (in ms)</description>
        </property>
        <property>
            <name>apiAccessToken</name>
            <description>The API Key of the SWH API</description>
        </property>
        <property>
            <name>softwareLimit</name>
            <description>Limit on the number of repo URLs to use (Optional); for debug purposes</description>
        </property>
        <property>
            <name>resumeFrom</name>
            <description>Variable that indicates the step to start from</description>
        </property>
    </parameters>

    <!-- Global variables -->
    <global>
        <job-tracker>${jobTracker}</job-tracker>
        <name-node>${nameNode}</name-node>
        <configuration>
            <property>
                <name>oozie.action.sharelib.for.spark</name>
                <value>${oozieActionShareLibForSpark2}</value>
            </property>
            <property>
                <name>actionsetsPath</name>
                <value>${actionsetsPath}</value>
            </property>
            <property>
                <name>apiAccessToken</name>
                <value>${apiAccessToken}</value>
            </property>
        </configuration>
    </global>

    <start to="startFrom"/>

    <kill name="Kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>

    <decision name="startFrom">
        <switch>
            <case to="collect-software-repository-urls">${wf:conf('resumeFrom') eq 'collect-software-repository-urls'}</case>
            <case to="create-swh-actionsets">${wf:conf('resumeFrom') eq 'create-swh-actionsets'}</case>
            <default to="collect-software-repository-urls"/>
        </switch>
    </decision>

    <action name="collect-software-repository-urls">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Collect software repository URLs</name>
            <class>eu.dnetlib.dhp.swh.CollectSoftwareRepositoryURLs</class>
            <jar>dhp-swh-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>

            <arg>--softwareCodeRepositoryURLs</arg><arg>${softwareCodeRepositoryURLs}</arg>
            <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
            <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
            <arg>--softwareLimit</arg><arg>${softwareLimit}</arg>
        </spark>
        <ok to="collect-repository-last-visit-data"/>
        <error to="Kill"/>
    </action>

    <action name="collect-repository-last-visit-data">
        <java>
            <main-class>eu.dnetlib.dhp.swh.CollectLastVisitRepositoryData</main-class>

            <arg>--namenode</arg><arg>${nameNode}</arg>
            <arg>--softwareCodeRepositoryURLs</arg><arg>${softwareCodeRepositoryURLs}</arg>
            <arg>--lastVisitsPath</arg><arg>${lastVisitsPath}</arg>

            <arg>--maxNumberOfRetry</arg><arg>${maxNumberOfRetry}</arg>
            <arg>--requestDelay</arg><arg>${requestDelay}</arg>
            <arg>--retryDelay</arg><arg>${retryDelay}</arg>
            <arg>--requestMethod</arg><arg>GET</arg>
            <arg>--apiAccessToken</arg><arg>${apiAccessToken}</arg>

        </java>
        <ok to="archive-repository-urls"/>
        <error to="Kill"/>
    </action>

    <action name="archive-repository-urls">
        <java>
            <main-class>eu.dnetlib.dhp.swh.ArchiveRepositoryURLs</main-class>

            <arg>--namenode</arg><arg>${nameNode}</arg>
            <arg>--lastVisitsPath</arg><arg>${lastVisitsPath}</arg>
            <arg>--archiveRequestsPath</arg><arg>${archiveRequestsPath}</arg>
            <arg>--archiveThresholdInDays</arg><arg>365</arg>

            <arg>--maxNumberOfRetry</arg><arg>${maxNumberOfRetry}</arg>
            <arg>--requestDelay</arg><arg>${requestDelay}</arg>
            <arg>--retryDelay</arg><arg>${retryDelay}</arg>
            <arg>--requestMethod</arg><arg>POST</arg>
            <arg>--apiAccessToken</arg><arg>${apiAccessToken}</arg>

        </java>
        <ok to="create-swh-actionsets"/>
        <error to="Kill"/>
    </action>

    <action name="create-swh-actionsets">
        <spark xmlns="uri:oozie:spark-action:0.2">
            <master>yarn</master>
            <mode>cluster</mode>
            <name>Create actionsets for SWH data</name>
            <class>eu.dnetlib.dhp.swh.PrepareSWHActionsets</class>
            <jar>dhp-swh-${projectVersion}.jar</jar>
            <spark-opts>
                --executor-memory=${sparkExecutorMemory}
                --executor-cores=${sparkExecutorCores}
                --driver-memory=${sparkDriverMemory}
                --conf spark.extraListeners=${spark2ExtraListeners}
                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
            </spark-opts>

            <arg>--lastVisitsPath</arg><arg>${lastVisitsPath}</arg>
            <arg>--actionsetsPath</arg><arg>${actionsetsPath}</arg>
            <arg>--softwareInputPath</arg><arg>${graphPath}/software</arg>
        </spark>
        <ok to="End"/>
        <error to="Kill"/>
    </action>

    <end name="End"/>

</workflow-app>
@ -0,0 +1,38 @@

package eu.dnetlib.dhp.swh;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.text.ParseException;

import org.junit.jupiter.api.Test;

public class ArchiveRepositoryURLsTest {

	@Test
	void testArchive() throws IOException, ParseException {
		String inputPath = getClass()
			.getResource("/eu/dnetlib/dhp/swh/lastVisitDataToArchive.csv")
			.getPath();

		File file = new File(inputPath);

		// try-with-resources closes both the file reader and the buffering stream
		try (FileReader fr = new FileReader(file); BufferedReader br = new BufferedReader(fr)) {
			String line;
			while ((line = br.readLine()) != null) {
				String[] tokens = line.split("\t");

				String response = ArchiveRepositoryURLs.handleRecord(tokens[0], tokens[1], 365);
				System.out.println(tokens[0] + "\t" + response);
				System.out.println();
			}
		}
	}
}
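Note that the 365 passed to handleRecord mirrors the --archiveThresholdInDays value hard-coded in the workflow above: origins whose last full SWH visit is older than that threshold are presumably re-submitted for archival, while fresher ones are skipped.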
@ -0,0 +1,97 @@

package eu.dnetlib.dhp.swh;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.io.Text;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.schema.action.AtomicAction;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.schema.oaf.utils.CleaningFunctions;
import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;

public class PrepareSWHActionsetsTest {

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static SparkSession spark;

	private static Path workingDir;

	private static final Logger log = LoggerFactory
		.getLogger(PrepareSWHActionsetsTest.class);

	@BeforeAll
	public static void beforeAll() throws IOException {
		workingDir = Files.createTempDirectory(PrepareSWHActionsetsTest.class.getSimpleName());

		log.info("Using work dir {}", workingDir);

		SparkConf conf = new SparkConf();
		conf.setAppName(PrepareSWHActionsetsTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", workingDir.toString());
		conf.set("hive.metastore.warehouse.dir", workingDir.resolve("warehouse").toString());

		spark = SparkSession
			.builder()
			.appName(PrepareSWHActionsetsTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	public static void afterAll() throws IOException {
		FileUtils.deleteDirectory(workingDir.toFile());
		spark.stop();
	}

	@Test
	void testRun() throws Exception {

		String lastVisitsPath = getClass()
			.getResource("/eu/dnetlib/dhp/swh/last_visits_data.seq")
			.getPath();

		String outputPath = workingDir.toString() + "/actionSet";

		String softwareInputPath = getClass()
			.getResource("/eu/dnetlib/dhp/swh/software.json.gz")
			.getPath();

		PrepareSWHActionsets
			.main(
				new String[] {
					"-isSparkSessionManaged", Boolean.FALSE.toString(),
					"-lastVisitsPath", lastVisitsPath,
					"-softwareInputPath", softwareInputPath,
					"-actionsetsPath", outputPath
				});
	}
}
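The test above only checks that the job runs end to end. Below is a sketch of assertions that could follow the main() call, assuming the job writes its output as a SequenceFile of (Text, Text) pairs holding serialized AtomicAction<Relation> records, the usual dnet actionset layout; that layout is an assumption here, not shown in this diff. All types used are already imported by the test class.

		// Hypothetical verification step reading back the actionset written above;
		// assumes AtomicAction<Relation> payloads serialized as JSON in the value column.
		JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		JavaRDD<Relation> relations = sc
			.sequenceFile(outputPath, Text.class, Text.class)
			.map(pair -> OBJECT_MAPPER.readValue(pair._2().toString(), AtomicAction.class))
			.map(action -> (Relation) action.getPayload());

		log.info("Number of SWH relations in the actionset: {}", relations.count());
		Assertions.assertTrue(relations.count() > 0, "the actionset should not be empty");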
@ -0,0 +1,58 @@

package eu.dnetlib.dhp.swh;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;

import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.common.collection.CollectorException;
import eu.dnetlib.dhp.common.collection.HttpClientParams;
import eu.dnetlib.dhp.swh.utils.SWHConnection;
import eu.dnetlib.dhp.swh.utils.SWHConstants;

public class SWHConnectionTest {

	private static final Logger log = LoggerFactory.getLogger(SWHConnectionTest.class);

	@Test
	void testGetCall() throws IOException {

		HttpClientParams clientParams = new HttpClientParams();
		clientParams.setRequestMethod("GET");

		SWHConnection swhConnection = new SWHConnection(clientParams, null);

		String repoUrl = "https://github.com/stanford-futuredata/FAST";
		URL url = new URL(String.format(SWHConstants.SWH_LATEST_VISIT_URL, repoUrl));
		String response = null;
		try {
			response = swhConnection.call(url.toString());
		} catch (CollectorException e) {
			log.error("Error in request: {}", url);
		}
		log.info(response);
	}

	@Test
	void testPostCall() throws MalformedURLException {

		HttpClientParams clientParams = new HttpClientParams();
		clientParams.setRequestMethod("POST");

		SWHConnection swhConnection = new SWHConnection(clientParams, null);

		String repoUrl = "https://github.com/stanford-futuredata/FAST";
		URL url = new URL(String.format(SWHConstants.SWH_ARCHIVE_URL, SWHConstants.DEFAULT_VISIT_TYPE, repoUrl));
		String response = null;
		try {
			response = swhConnection.call(url.toString());
		} catch (CollectorException e) {
			log.error("Error in request: {}", url);
		}
		log.info(response);
	}
}
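SWH_LATEST_VISIT_URL and SWH_ARCHIVE_URL are format strings defined in SWHConstants, which is not part of this excerpt. Judging from the public Software Heritage API, the two calls above presumably resolve to endpoints of this shape (illustrative, not confirmed by the diff):

GET  https://archive.softwareheritage.org/api/1/origin/https://github.com/stanford-futuredata/FAST/visit/latest/
POST https://archive.softwareheritage.org/api/1/origin/save/git/url/https://github.com/stanford-futuredata/FAST/

where DEFAULT_VISIT_TYPE would expand to the visit type ("git" for GitHub origins).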
@ -0,0 +1,7 @@

https://bitbucket.org/samskillman/yt-stokes {"origin":"https://bitbucket.org/samskillman/yt-stokes","visit":43,"date":"2021-09-13T21:59:27.125171+00:00","status":"failed","snapshot":null,"type":"hg","metadata":{},"origin_url":"https://archive.softwareheritage.org/api/1/origin/https://bitbucket.org/samskillman/yt-stokes/get/","snapshot_url":null}
https://github.com/bioinsilico/BIPSPI {"origin":"https://github.com/bioinsilico/BIPSPI","visit":1,"date":"2020-03-18T14:50:21.541822+00:00","status":"full","snapshot":"c6c69d2cd73ce89811448da5f031611df6f63bdb","type":"git","metadata":{},"origin_url":"https://archive.softwareheritage.org/api/1/origin/https://github.com/bioinsilico/BIPSPI/get/","snapshot_url":"https://archive.softwareheritage.org/api/1/snapshot/c6c69d2cd73ce89811448da5f031611df6f63bdb/"}
https://github.com/mloop/kdiff-type1-error-rate/blob/master/analysis/simulation.R {}
https://github.com/schwanbeck/YSMR {"origin":"https://github.com/schwanbeck/YSMR","visit":6,"date":"2023-08-02T15:25:02.650676+00:00","status":"full","snapshot":"a9d1c5f0bca2def198b89f65bc9f7da3be8439ed","type":"git","metadata":{},"origin_url":"https://archive.softwareheritage.org/api/1/origin/https://github.com/schwanbeck/YSMR/get/","snapshot_url":"https://archive.softwareheritage.org/api/1/snapshot/a9d1c5f0bca2def198b89f65bc9f7da3be8439ed/"}
https://github.com/lvclark/TASSELGBS_combine {"origin":"https://github.com/lvclark/TASSELGBS_combine","visit":1,"date":"2020-04-12T20:44:09.405589+00:00","status":"full","snapshot":"ffa6fefd3f5becefbea9fe0e6d5d93859c95c071","type":"git","metadata":{},"origin_url":"https://archive.softwareheritage.org/api/1/origin/https://github.com/lvclark/TASSELGBS_combine/get/","snapshot_url":"https://archive.softwareheritage.org/api/1/snapshot/ffa6fefd3f5becefbea9fe0e6d5d93859c95c071/"}
https://github.com/PRIDE-Toolsuite/inspector-example-files {"origin":"https://github.com/PRIDE-Toolsuite/inspector-example-files","visit":12,"date":"2021-01-25T08:54:13.394674+00:00","status":"full","snapshot":"0b56eb0ad07cf778df6dabefc4b73636e0ae8b37","type":"git","metadata":{},"origin_url":"https://archive.softwareheritage.org/api/1/origin/https://github.com/PRIDE-Toolsuite/inspector-example-files/get/","snapshot_url":"https://archive.softwareheritage.org/api/1/snapshot/0b56eb0ad07cf778df6dabefc4b73636e0ae8b37/"}
https://bitbucket.org/matwey/chelyabinsk {"origin":"https://bitbucket.org/matwey/chelyabinsk","visit":6,"date":"2021-09-24T19:32:43.322909+00:00","status":"full","snapshot":"215913858c3ee0e61e1aaea18241c5ee006da1b0","type":"hg","metadata":{},"origin_url":"https://archive.softwareheritage.org/api/1/origin/https://bitbucket.org/matwey/chelyabinsk/get/","snapshot_url":"https://archive.softwareheritage.org/api/1/snapshot/215913858c3ee0e61e1aaea18241c5ee006da1b0/"}
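Each record above is a tab-separated pair of origin URL and last-visit JSON, matching the split("\t") in ArchiveRepositoryURLsTest. A minimal, self-contained sketch of how one such record can be parsed (Jackson is already on the classpath for the tests above; the class and variable names are illustrative):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class LastVisitRecordSketch {

	public static void main(String[] args) throws Exception {
		// One record of the file above: origin URL, a tab, then the visit JSON.
		String line = "https://github.com/schwanbeck/YSMR\t"
			+ "{\"origin\":\"https://github.com/schwanbeck/YSMR\",\"visit\":6,"
			+ "\"date\":\"2023-08-02T15:25:02.650676+00:00\",\"status\":\"full\","
			+ "\"snapshot\":\"a9d1c5f0bca2def198b89f65bc9f7da3be8439ed\"}";

		String[] tokens = line.split("\t"); // tokens[0] = origin, tokens[1] = visit JSON
		JsonNode visit = new ObjectMapper().readTree(tokens[1]);

		// "status" is "full" for a successful visit and "failed" otherwise;
		// "snapshot" is null when nothing could be archived (see yt-stokes above).
		System.out.println(tokens[0] + " -> " + visit.path("status").asText()
			+ " @ " + visit.path("date").asText());
	}
}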
Binary file not shown.
Binary file not shown.
@ -39,6 +39,7 @@
		<module>dhp-broker-events</module>
		<module>dhp-doiboost</module>
		<module>dhp-impact-indicators</module>
		<module>dhp-swh</module>
	</modules>

	<pluginRepositories>