forked from D-Net/dnet-hadoop
Compare commits: affRoFromR ... main (351 commits)
Author | SHA1 | Date |
---|---|---|
sab | ef6c90cc64 | |
sab | df82f8beb9 | |
sab | 53787dbf67 | |
sab | bbb79273a3 | |
sab | 7f39375ba8 | |
Claudio Atzori | d20a5e020a | |
Claudio Atzori | 3d1d8e6036 | |
Claudio Atzori | 0b1c58358b | |
Claudio Atzori | b70a440aca | |
Michele Artini | 36c3df1652 | |
Claudio Atzori | 2f13683285 | |
Claudio Atzori | 5ab409dcab | |
Claudio Atzori | b756cfeb85 | |
Claudio Atzori | 51d6a541bd | |
Claudio Atzori | 07ce92cef2 | |
Miriam Baglioni | f043b7b096 | |
Claudio Atzori | 153b56eeff | |
Claudio Atzori | ed97ba4565 | |
Claudio Atzori | 7b398a6d0b | |
Claudio Atzori | 13f6506ce5 | |
Claudio Atzori | 3d9ddaa23a | |
Claudio Atzori | c06dfdfd86 | |
Claudio Atzori | b822b34abe | |
Michele De Bonis | ea1841fbd2 | |
Miriam Baglioni | 4dbce39237 | |
Miriam Baglioni | 3ee8a7d18a | |
Claudio Atzori | ee7deb3f60 | |
Claudio Atzori | 157cc8be87 | |
Claudio Atzori | 023099a921 | |
Claudio Atzori | 786c217085 | |
Lampros Smyrnaios | c858c02111 | |
Claudio Atzori | 8220e27110 | |
Claudio Atzori | bc993d49c1 | |
Claudio Atzori | 1dc7458de2 | |
Claudio Atzori | a7a54aab47 | |
Miriam Baglioni | eaa00a4199 | |
Claudio Atzori | fb731b6d46 | |
Miriam Baglioni | b6da35e736 | |
Lampros Smyrnaios | 3c9b8de892 | |
Antonis Lempesis | c67ef157d3 | |
Lampros Smyrnaios | c23f3031ed | |
Claudio Atzori | 8ec151aa3d | |
Claudio Atzori | 2636936162 | |
Miriam Baglioni | ef437a8cdf | |
Miriam Baglioni | 86088ef26e | |
Miriam Baglioni | 143c525343 | |
Claudio Atzori | c371513d43 | |
Claudio Atzori | 71927ca818 | |
Giambattista Bloisi | 46018dc804 | |
Miriam Baglioni | 3efd5b1308 | |
Miriam Baglioni | 196fa55774 | |
Miriam Baglioni | 50805e3fc1 | |
Claudio Atzori | d39a1054b8 | |
Claudio Atzori | 576efc1857 | |
Claudio Atzori | efc1632e16 | |
Claudio Atzori | 91b49366c6 | |
Claudio Atzori | 5e05385d35 | |
Miriam Baglioni | c4d9b5b9d2 | |
Miriam Baglioni | bf9a5e6314 | |
Miriam Baglioni | 9d79ddb3dd | |
Miriam Baglioni | 907aa28c6c | |
Miriam Baglioni | 3955ceaa76 | |
Miriam Baglioni | 128c143394 | |
Claudio Atzori | 5133993ee5 | |
Claudio Atzori | 5cf259a851 | |
Claudio Atzori | e1828fc60e | |
Claudio Atzori | 56920b447d | |
Giambattista Bloisi | 3feab5d92d | |
Claudio Atzori | 6be783caec | |
Claudio Atzori | b703f94f09 | |
Miriam Baglioni | 14f275ffaf | |
Claudio Atzori | a428e7be7e | |
Lampros Smyrnaios | e3f28338c1 | |
Claudio Atzori | 8e45c5baa8 | |
Claudio Atzori | db5e18c784 | |
Claudio Atzori | fb266efbcb | |
Claudio Atzori | d7daf54333 | |
Claudio Atzori | f99eaa0376 | |
Claudio Atzori | 23312fcc1e | |
Miriam Baglioni | b864f0adcf | |
Miriam Baglioni | 7a44869d87 | |
Miriam Baglioni | 12ffde023f | |
Antonis Lempesis | 15b54a345a | |
Lampros Smyrnaios | b48ed6e617 | |
Lampros Smyrnaios | 68322843e2 | |
Lampros Smyrnaios | c7b32bbacc | |
Claudio Atzori | c3fe59bc78 | |
Claudio Atzori | 1ea67eba82 | |
Claudio Atzori | f9fb2fef6e | |
Claudio Atzori | 834461ba26 | |
Sandro La Bruzzo | 032bcc8279 | |
Claudio Atzori | 92f018d196 | |
Claudio Atzori | 0611c81a2f | |
Claudio Atzori | 1efe7f7e39 | |
Claudio Atzori | 53e7bb4336 | |
Claudio Atzori | f7d56e2ef2 | |
Claudio Atzori | c1237ab39e | |
Claudio Atzori | dc3a5858f7 | |
Claudio Atzori | 55f39f7850 | |
Claudio Atzori | 39a2afe8b5 | |
Claudio Atzori | 908ed9da7a | |
Antonis Lempesis | 0cada3cc8f | |
Antonis Lempesis | 90a4fb3547 | |
Claudio Atzori | 18aa323ee9 | |
Claudio Atzori | b4e3389432 | |
Giambattista Bloisi | 711048ceed | |
Michele Artini | f4068de298 | |
Michele Artini | 2615136efc | |
Lampros Smyrnaios | 49af2e5740 | |
Antonis Lempesis | d2649a1429 | |
Claudio Atzori | 795e1b2629 | |
Claudio Atzori | 0c05abe50b | |
Antonis Lempesis | b52a5a753b | |
Antonis Lempesis | c3fe9662b2 | |
Claudio Atzori | 8fdd0244ad | |
Claudio Atzori | 18fdaaf548 | |
Antonis Lempesis | 0c71c58df6 | |
Antonis Lempesis | 43d05dbebb | |
Antonis Lempesis | e728a0897c | |
Antonis Lempesis | 308ae580a9 | |
Antonis Lempesis | 27d22bd8f9 | |
Antonis Lempesis | 1f5aba12fa | |
Claudio Atzori | 43e123c624 | |
Claudio Atzori | 62a07b7add | |
Claudio Atzori | 96bddcc921 | |
Miriam Baglioni | 0486cea4c4 | |
Claudio Atzori | 013935c593 | |
Claudio Atzori | 6132bd028e | |
Miriam Baglioni | 519db1ddef | |
Claudio Atzori | 5add51f38c | |
Claudio Atzori | f01390702e | |
Claudio Atzori | 5592ccc37a | |
Claudio Atzori | d16c15da8d | |
Claudio Atzori | 09a6d17059 | |
Claudio Atzori | d70793847d | |
Michele De Bonis | f6601ea7d1 | |
Michele De Bonis | cd4c3c934d | |
Michele Artini | a99942f7cf | |
Michele Artini | 7f7083f53e | |
Michele Artini | d9b23a76c5 | |
Michele Artini | 841ca92246 | |
Michele Artini | 3bcfc40293 | |
Giambattista Bloisi | 3067ea390d | |
Miriam Baglioni | c94d94035c | |
Michele Artini | 4374d7449e | |
Claudio Atzori | 07d009007b | |
Claudio Atzori | 071d044971 | |
Claudio Atzori | b3ddbaed58 | |
Claudio Atzori | 1416f16b35 | |
Giambattista Bloisi | ba1a0e7b4f | |
Giambattista Bloisi | 079085286c | |
Giambattista Bloisi | 8dd666aedd | |
Claudio Atzori | f21133229a | |
Claudio Atzori | d86b909db2 | |
Claudio Atzori | 08162902ab | |
Claudio Atzori | e8630a6d03 | |
Claudio Atzori | f28c63d5ef | |
Claudio Atzori | 1a8b609ed2 | |
Miriam Baglioni | 4c8706efee | |
Claudio Atzori | 4d0c59669b | |
Sandro La Bruzzo | 3c8c88bdd3 | |
Claudio Atzori | 106968adaa | |
Claudio Atzori | a8a4db96f0 | |
Sandro La Bruzzo | 37e36baf76 | |
Sandro La Bruzzo | 9d39845d1f | |
Sandro La Bruzzo | 1fbd4325f5 | |
Sandro La Bruzzo | 1f1a6a5f5f | |
Claudio Atzori | c4ec35b6cd | |
Claudio Atzori | 1726f49790 | |
Claudio Atzori | 1763d377ad | |
Claudio Atzori | a0311e8a90 | |
Claudio Atzori | 8fb05888fd | |
Claudio Atzori | 2b626815ff | |
Miriam Baglioni | b177cd5a0a | |
Serafeim Chatzopoulos | 671ba8a5a7 | |
Claudio Atzori | 5f1ed61c1f | |
Claudio Atzori | 8c03c41d5d | |
Claudio Atzori | 97454e9594 | |
Serafeim Chatzopoulos | 7e34dde774 | |
Serafeim Chatzopoulos | 24c3f92d87 | |
Serafeim Chatzopoulos | 6ce9b600c1 | |
Serafeim Chatzopoulos | 94089878fd | |
Miriam Baglioni | 0097f4e64b | |
Miriam Baglioni | 5c5a195e97 | |
Miriam Baglioni | 70b78a40c7 | |
Miriam Baglioni | f206ff42d6 | |
Miriam Baglioni | 34358afe75 | |
Miriam Baglioni | 18bfff8af3 | |
Miriam Baglioni | 69dac91659 | |
Miriam Baglioni | a9ede1e989 | |
Claudio Atzori | 242d647146 | |
Claudio Atzori | af3ffad6c4 | |
Claudio Atzori | ba5475ed4c | |
Giambattista Bloisi | 2c235e82ad | |
Claudio Atzori | 4ac06c9e37 | |
Claudio Atzori | fa692b3629 | |
Claudio Atzori | ef02648399 | |
Claudio Atzori | d13bb534f0 | |
Giambattista Bloisi | 775c3f704a | |
Sandro La Bruzzo | 9c3ab11d5b | |
Sandro La Bruzzo | 423ef30676 | |
Giambattista Bloisi | 7152d47f84 | |
Claudio Atzori | 4853c19b5e | |
Giambattista Bloisi | 1f226d1dce | |
Alessia Bardi | 6186cdc2cc | |
Alessia Bardi | d94b9bebf7 | |
Alessia Bardi | 19abba8fa7 | |
Claudio Atzori | c2f179800c | |
Serafeim Chatzopoulos | 2aed5a74be | |
Claudio Atzori | 4dc4862011 | |
Claudio Atzori | dc80ab14d3 | |
Alessia Bardi | 77a2199837 | |
Claudio Atzori | 265180bfd2 | |
Claudio Atzori | da0e9828f7 | |
Miriam Baglioni | 599828ce35 | |
Claudio Atzori | 0bc74e2000 | |
Claudio Atzori | 7180911ded | |
Claudio Atzori | da1727f93f | |
Claudio Atzori | ccac6a7f75 | |
Claudio Atzori | d512df8612 | |
Claudio Atzori | 59764145bb | |
Miriam Baglioni | 9e8e39f78a | |
Claudio Atzori | 373a5f2c83 | |
Claudio Atzori | 8af129b0c7 | |
dimitrispie | 706092bc19 | |
dimitrispie | aedd279f78 | |
Miriam Baglioni | 8dcd028eed | |
Miriam Baglioni | 8621377917 | |
Miriam Baglioni | ef2dd7a980 | |
Claudio Atzori | f3a85e224b | |
Claudio Atzori | 4ef0f2ec26 | |
Claudio Atzori | 288ec0b7d6 | |
Claudio Atzori | 5f32edd9bf | |
Claudio Atzori | e10ce92fe5 | |
Claudio Atzori | b93e1541aa | |
Claudio Atzori | d029bf0b94 | |
Michele Artini | 009d7f312f | |
Miriam Baglioni | e4b27182d0 | |
Giambattista Bloisi | 758e662ab8 | |
Giambattista Bloisi | 485f9d18cb | |
Michele Artini | a92206dab5 | |
Miriam Baglioni | d9506035e4 | |
Alessia Bardi | 118e72d7db | |
Alessia Bardi | 5befd93d7d | |
Michele Artini | cae92cf811 | |
Miriam Baglioni | b64a5eb4a5 | |
Claudio Atzori | 654ffcba60 | |
Claudio Atzori | db625e548d | |
Alessia Bardi | 04141fe259 | |
Alessia Bardi | b88f009d9f | |
Alessia Bardi | 5ffe82ffd8 | |
Alessia Bardi | 1c173642f0 | |
Alessia Bardi | 382f46a8e4 | |
Miriam Baglioni | 9fc8ebe98b | |
Miriam Baglioni | 24c41806ac | |
Miriam Baglioni | 087b5a7973 | |
Claudio Atzori | 688e3b7936 | |
Claudio Atzori | 2e465915b4 | |
Claudio Atzori | 4a4ca634f0 | |
Miriam Baglioni | c6a7602b3e | |
Miriam Baglioni | 831055a1fc | |
Miriam Baglioni | cf3d0f4f83 | |
Claudio Atzori | 4f67225fbc | |
Claudio Atzori | e093f04874 | |
Miriam Baglioni | c5a9f39141 | |
Miriam Baglioni | ecc05fe0f3 | |
Claudio Atzori | 42442ccd39 | |
Miriam Baglioni | 9a9cc6a1dd | |
Michele Artini | 200098b683 | |
Michele Artini | 9c1df15071 | |
Miriam Baglioni | 32870339f5 | |
Miriam Baglioni | 7184cc0804 | |
Miriam Baglioni | 7473093c84 | |
Miriam Baglioni | 5f0906be60 | |
Claudio Atzori | 1b37516578 | |
Claudio Atzori | c1e2460293 | |
Claudio Atzori | 3800361033 | |
Michele Artini | 699736addc | |
Claudio Atzori | f86e19b282 | |
Michele Artini | d40e20f437 | |
Michele Artini | 4953ae5649 | |
Miriam Baglioni | c60d3a2b46 | |
Claudio Atzori | 7becdaf31d | |
Miriam Baglioni | b713132db7 | |
Miriam Baglioni | 11f2b470d3 | |
Sandro La Bruzzo | 91c70b15a5 | |
Claudio Atzori | f910b7379d | |
Claudio Atzori | 33bdad104e | |
Claudio Atzori | 5816ded93f | |
Claudio Atzori | 46972f8393 | |
Claudio Atzori | da85ca697d | |
Miriam Baglioni | 059e100ec7 | |
Miriam Baglioni | fc95a550c3 | |
Miriam Baglioni | 6901ac91b1 | |
Claudio Atzori | 08c4588d47 | |
Miriam Baglioni | 29d3da85f1 | |
Miriam Baglioni | 33a2b1b5dc | |
Miriam Baglioni | c6df8327b3 | |
Miriam Baglioni | 935aa367d8 | |
Miriam Baglioni | 43aedbdfe5 | |
Miriam Baglioni | b6da9b67ff | |
Claudio Atzori | a34c8b6f81 | |
Miriam Baglioni | 122e75aa17 | |
Miriam Baglioni | cee7a45b1d | |
Claudio Atzori | ed64618235 | |
Claudio Atzori | 8742934843 | |
Claudio Atzori | 13cc592f39 | |
Claudio Atzori | af15b1e48d | |
Claudio Atzori | eb45ba7af0 | |
Claudio Atzori | a929dc5fee | |
Miriam Baglioni | 5f9383b2d9 | |
Miriam Baglioni | b18bbca8af | |
dimitrispie | 55fa3b2a17 | |
Claudio Atzori | 80c5e0f637 | |
Claudio Atzori | c01d528ab2 | |
Claudio Atzori | e6d788d27a | |
Claudio Atzori | 930f118673 | |
Claudio Atzori | b2c3071e72 | |
Claudio Atzori | 10ec074f79 | |
Claudio Atzori | 7225fe9cbe | |
Miriam Baglioni | 869e129288 | |
Miriam Baglioni | 840465958b | |
Claudio Atzori | bdc8f993d0 | |
Miriam Baglioni | ec87149cb3 | |
Miriam Baglioni | b42e2c9df6 | |
Miriam Baglioni | 1329aa8479 | |
Miriam Baglioni | a0ee1a8640 | |
Claudio Atzori | 96062164f9 | |
Claudio Atzori | 35bb7c423f | |
Claudio Atzori | fd87571506 | |
Claudio Atzori | c527112e33 | |
Claudio Atzori | 65209359bc | |
Claudio Atzori | d72a64ded3 | |
Claudio Atzori | 3e8499ce47 | |
Claudio Atzori | 61aacb3271 | |
Claudio Atzori | dbb567251a | |
Claudio Atzori | c7e8ad853e | |
Claudio Atzori | 0849ebfd80 | |
Claudio Atzori | 281239249e | |
Claudio Atzori | 45fc5e12be | |
Claudio Atzori | 1c05aaaa2e | |
Claudio Atzori | 01d5ad6361 | |
Claudio Atzori | d872d1cdd9 | |
Claudio Atzori | ab0efecab4 | |
Claudio Atzori | 725c3c68d0 | |
Claudio Atzori | 300ae6221c | |
Claudio Atzori | 0ec2eaba35 | |
Claudio Atzori | a387807d43 | |
Claudio Atzori | 2abe2bc137 | |
Claudio Atzori | a07c876922 | |
Claudio Atzori | cbd48bc645 |
@@ -7,12 +7,12 @@ import java.sql.*;

 import java.util.function.Consumer;

 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

 public class DbClient implements Closeable {

-	private static final Log log = LogFactory.getLog(DbClient.class);
+	private static final Logger log = LoggerFactory.getLogger(DbClient.class);

 	private final Connection connection;

@@ -37,6 +37,8 @@ public class DbClient implements Closeable {
 		try (final Statement stmt = connection.createStatement()) {
 			stmt.setFetchSize(100);

+			log.info("running SQL:\n\n{}\n\n", sql);
+
 			try (final ResultSet rs = stmt.executeQuery(sql)) {
 				while (rs.next()) {
 					consumer.accept(rs);
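The hunk above migrates DbClient from commons-logging to SLF4J and adds a parameterized log statement. As a minimal sketch (illustration only, not part of the patch; the class name `LoggingSketch` and the query string are made up), SLF4J substitutes each `{}` placeholder only when the message is actually emitted, so the SQL text is not concatenated when INFO is disabled:

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingSketch {

	private static final Logger log = LoggerFactory.getLogger(LoggingSketch.class);

	public static void main(String[] args) {
		String sql = "SELECT * FROM datasources"; // hypothetical query, for illustration
		// The {} placeholder is filled lazily at log time; unlike
		// log.info("running SQL: " + sql) there is no string concatenation
		// when the INFO level is turned off.
		log.info("running SQL:\n\n{}\n\n", sql);
	}
}
```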
@@ -1,53 +0,0 @@ (deleted: InputStreamRequestBody.java)

package eu.dnetlib.dhp.common.api;

import java.io.IOException;
import java.io.InputStream;

import okhttp3.MediaType;
import okhttp3.RequestBody;
import okhttp3.internal.Util;
import okio.BufferedSink;
import okio.Okio;
import okio.Source;

public class InputStreamRequestBody extends RequestBody {

	private final InputStream inputStream;
	private final MediaType mediaType;
	private final long lenght;

	public static RequestBody create(final MediaType mediaType, final InputStream inputStream, final long len) {
		return new InputStreamRequestBody(inputStream, mediaType, len);
	}

	private InputStreamRequestBody(InputStream inputStream, MediaType mediaType, long len) {
		this.inputStream = inputStream;
		this.mediaType = mediaType;
		this.lenght = len;
	}

	@Override
	public MediaType contentType() {
		return mediaType;
	}

	@Override
	public long contentLength() {
		return lenght;
	}

	@Override
	public void writeTo(BufferedSink sink) throws IOException {
		Source source = null;
		try {
			source = Okio.source(inputStream);
			sink.writeAll(source);
		} finally {
			Util.closeQuietly(source);
		}
	}
}
@@ -1,8 +0,0 @@ (deleted: MissingConceptDoiException.java)

package eu.dnetlib.dhp.common.api;

public class MissingConceptDoiException extends Throwable {
	public MissingConceptDoiException(String message) {
		super(message);
	}
}
@@ -1,365 +0,0 @@ (deleted: ZenodoAPIClient.java)

package eu.dnetlib.dhp.common.api;

import java.io.*;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.concurrent.TimeUnit;

import org.apache.http.HttpHeaders;
import org.apache.http.entity.ContentType;
import org.jetbrains.annotations.NotNull;

import com.google.gson.Gson;

import eu.dnetlib.dhp.common.api.zenodo.ZenodoModel;
import eu.dnetlib.dhp.common.api.zenodo.ZenodoModelList;
import okhttp3.*;

public class ZenodoAPIClient implements Serializable {

	String urlString;
	String bucket;
	String deposition_id;
	String access_token;

	public static final MediaType MEDIA_TYPE_JSON = MediaType.parse("application/json; charset=utf-8");
	private static final MediaType MEDIA_TYPE_ZIP = MediaType.parse("application/zip");

	public String getUrlString() { return urlString; }
	public void setUrlString(String urlString) { this.urlString = urlString; }
	public String getBucket() { return bucket; }
	public void setBucket(String bucket) { this.bucket = bucket; }
	public void setDeposition_id(String deposition_id) { this.deposition_id = deposition_id; }

	public ZenodoAPIClient(String urlString, String access_token) {
		this.urlString = urlString;
		this.access_token = access_token;
	}

	/**
	 * Brand new deposition in Zenodo. It sets the deposition_id and the bucket where to store the files to upload
	 *
	 * @return response code
	 * @throws IOException
	 */
	public int newDeposition() throws IOException {
		String json = "{}";

		URL url = new URL(urlString);
		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
		conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString());
		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
		conn.setRequestMethod("POST");
		conn.setDoOutput(true);
		try (OutputStream os = conn.getOutputStream()) {
			byte[] input = json.getBytes("utf-8");
			os.write(input, 0, input.length);
		}

		String body = getBody(conn);

		int responseCode = conn.getResponseCode();
		conn.disconnect();

		if (!checkOKStatus(responseCode))
			throw new IOException("Unexpected code " + responseCode + body);

		ZenodoModel newSubmission = new Gson().fromJson(body, ZenodoModel.class);
		this.bucket = newSubmission.getLinks().getBucket();
		this.deposition_id = newSubmission.getId();

		return responseCode;
	}

	/**
	 * Upload files in Zenodo.
	 *
	 * @param is the inputStream for the file to upload
	 * @param file_name the name of the file as it will appear on Zenodo
	 * @return the response code
	 */
	public int uploadIS(InputStream is, String file_name) throws IOException {
		URL url = new URL(bucket + "/" + file_name);
		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
		conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, "application/zip");
		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
		conn.setDoOutput(true);
		conn.setRequestMethod("PUT");

		byte[] buf = new byte[8192];
		int length;
		try (OutputStream os = conn.getOutputStream()) {
			while ((length = is.read(buf)) != -1) {
				os.write(buf, 0, length);
			}
		}
		int responseCode = conn.getResponseCode();
		if (!checkOKStatus(responseCode)) {
			throw new IOException("Unexpected code " + responseCode + getBody(conn));
		}

		return responseCode;
	}

	@NotNull
	private String getBody(HttpURLConnection conn) throws IOException {
		String body = "{}";
		try (BufferedReader br = new BufferedReader(
			new InputStreamReader(conn.getInputStream(), "utf-8"))) {
			StringBuilder response = new StringBuilder();
			String responseLine = null;
			while ((responseLine = br.readLine()) != null) {
				response.append(responseLine.trim());
			}

			body = response.toString();
		}
		return body;
	}

	/**
	 * Associates metadata information to the current deposition
	 *
	 * @param metadata the metadata
	 * @return response code
	 * @throws IOException
	 */
	public int sendMretadata(String metadata) throws IOException {
		URL url = new URL(urlString + "/" + deposition_id);
		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
		conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString());
		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
		conn.setDoOutput(true);
		conn.setRequestMethod("PUT");

		try (OutputStream os = conn.getOutputStream()) {
			byte[] input = metadata.getBytes("utf-8");
			os.write(input, 0, input.length);
		}

		final int responseCode = conn.getResponseCode();
		conn.disconnect();
		if (!checkOKStatus(responseCode))
			throw new IOException("Unexpected code " + responseCode + getBody(conn));

		return responseCode;
	}

	private boolean checkOKStatus(int responseCode) {
		if (HttpURLConnection.HTTP_OK != responseCode ||
			HttpURLConnection.HTTP_CREATED != responseCode)
			return true;
		return false;
	}

	/**
	 * To publish the current deposition. It works for both new deposition or new version of an old deposition
	 *
	 * @return response code
	 * @throws IOException
	 */
	@Deprecated
	public int publish() throws IOException {
		String json = "{}";

		OkHttpClient httpClient = new OkHttpClient.Builder().connectTimeout(600, TimeUnit.SECONDS).build();

		RequestBody body = RequestBody.create(json, MEDIA_TYPE_JSON);

		Request request = new Request.Builder()
			.url(urlString + "/" + deposition_id + "/actions/publish")
			.addHeader("Authorization", "Bearer " + access_token)
			.post(body)
			.build();

		try (Response response = httpClient.newCall(request).execute()) {
			if (!response.isSuccessful())
				throw new IOException("Unexpected code " + response + response.body().string());

			return response.code();
		}
	}

	/**
	 * To create a new version of an already published deposition. It sets the deposition_id and the bucket to be used
	 * for the new version.
	 *
	 * @param concept_rec_id the concept record id of the deposition for which to create a new version. It is the last
	 *            part of the url for the DOI Zenodo suggests to use to cite all versions: DOI: 10.xxx/zenodo.656930
	 *            concept_rec_id = 656930
	 * @return response code
	 * @throws IOException
	 * @throws MissingConceptDoiException
	 */
	public int newVersion(String concept_rec_id) throws IOException, MissingConceptDoiException {
		setDepositionId(concept_rec_id, 1);
		String json = "{}";

		URL url = new URL(urlString + "/" + deposition_id + "/actions/newversion");
		HttpURLConnection conn = (HttpURLConnection) url.openConnection();

		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
		conn.setDoOutput(true);
		conn.setRequestMethod("POST");

		try (OutputStream os = conn.getOutputStream()) {
			byte[] input = json.getBytes("utf-8");
			os.write(input, 0, input.length);
		}

		String body = getBody(conn);

		int responseCode = conn.getResponseCode();

		conn.disconnect();
		if (!checkOKStatus(responseCode))
			throw new IOException("Unexpected code " + responseCode + body);

		ZenodoModel zenodoModel = new Gson().fromJson(body, ZenodoModel.class);
		String latest_draft = zenodoModel.getLinks().getLatest_draft();
		deposition_id = latest_draft.substring(latest_draft.lastIndexOf("/") + 1);
		bucket = getBucket(latest_draft);

		return responseCode;
	}

	/**
	 * To finish uploading a version or new deposition not published
	 * It sets the deposition_id and the bucket to be used
	 *
	 * @param deposition_id the deposition id of the not yet published upload
	 *            concept_rec_id = 656930
	 * @return response code
	 * @throws IOException
	 * @throws MissingConceptDoiException
	 */
	public int uploadOpenDeposition(String deposition_id) throws IOException, MissingConceptDoiException {

		this.deposition_id = deposition_id;

		String json = "{}";

		URL url = new URL(urlString + "/" + deposition_id);
		HttpURLConnection conn = (HttpURLConnection) url.openConnection();

		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
		conn.setRequestMethod("POST");
		conn.setDoOutput(true);
		try (OutputStream os = conn.getOutputStream()) {
			byte[] input = json.getBytes("utf-8");
			os.write(input, 0, input.length);
		}

		String body = getBody(conn);

		int responseCode = conn.getResponseCode();
		conn.disconnect();

		if (!checkOKStatus(responseCode))
			throw new IOException("Unexpected code " + responseCode + body);

		ZenodoModel zenodoModel = new Gson().fromJson(body, ZenodoModel.class);
		bucket = zenodoModel.getLinks().getBucket();

		return responseCode;
	}

	private void setDepositionId(String concept_rec_id, Integer page) throws IOException, MissingConceptDoiException {

		ZenodoModelList zenodoModelList = new Gson()
			.fromJson(getPrevDepositions(String.valueOf(page)), ZenodoModelList.class);

		for (ZenodoModel zm : zenodoModelList) {
			if (zm.getConceptrecid().equals(concept_rec_id)) {
				deposition_id = zm.getId();
				return;
			}
		}
		if (zenodoModelList.size() == 0)
			throw new MissingConceptDoiException(
				"The concept record id specified was missing in the list of depositions");
		setDepositionId(concept_rec_id, page + 1);
	}

	private String getPrevDepositions(String page) throws IOException {

		HttpUrl.Builder urlBuilder = HttpUrl.parse(urlString).newBuilder();
		urlBuilder.addQueryParameter("page", page);

		URL url = new URL(urlBuilder.build().toString());
		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
		conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString());
		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
		conn.setDoOutput(true);
		conn.setRequestMethod("GET");

		String body = getBody(conn);

		int responseCode = conn.getResponseCode();

		conn.disconnect();
		if (!checkOKStatus(responseCode))
			throw new IOException("Unexpected code " + responseCode + body);

		return body;
	}

	private String getBucket(String inputUurl) throws IOException {

		URL url = new URL(inputUurl);
		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
		conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString());
		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
		conn.setDoOutput(true);
		conn.setRequestMethod("GET");

		String body = getBody(conn);

		int responseCode = conn.getResponseCode();

		conn.disconnect();
		if (!checkOKStatus(responseCode))
			throw new IOException("Unexpected code " + responseCode + body);

		ZenodoModel zenodoModel = new Gson().fromJson(body, ZenodoModel.class);

		return zenodoModel.getLinks().getBucket();
	}

}
@@ -1,14 +0,0 @@ (deleted: Community.java)

package eu.dnetlib.dhp.common.api.zenodo;

public class Community {
	private String identifier;

	public String getIdentifier() { return identifier; }
	public void setIdentifier(String identifier) { this.identifier = identifier; }
}
@@ -1,47 +0,0 @@ (deleted: Creator.java)

package eu.dnetlib.dhp.common.api.zenodo;

public class Creator {
	private String affiliation;
	private String name;
	private String orcid;

	public String getAffiliation() { return affiliation; }
	public void setAffiliation(String affiliation) { this.affiliation = affiliation; }
	public String getName() { return name; }
	public void setName(String name) { this.name = name; }
	public String getOrcid() { return orcid; }
	public void setOrcid(String orcid) { this.orcid = orcid; }

	public static Creator newInstance(String name, String affiliation, String orcid) {
		Creator c = new Creator();
		if (name != null) {
			c.name = name;
		}
		if (affiliation != null) {
			c.affiliation = affiliation;
		}
		if (orcid != null) {
			c.orcid = orcid;
		}

		return c;
	}
}
@@ -1,44 +0,0 @@ (deleted: File.java)

package eu.dnetlib.dhp.common.api.zenodo;

import java.io.Serializable;

public class File implements Serializable {
	private String checksum;
	private String filename;
	private long filesize;
	private String id;

	public String getChecksum() { return checksum; }
	public void setChecksum(String checksum) { this.checksum = checksum; }
	public String getFilename() { return filename; }
	public void setFilename(String filename) { this.filename = filename; }
	public long getFilesize() { return filesize; }
	public void setFilesize(long filesize) { this.filesize = filesize; }
	public String getId() { return id; }
	public void setId(String id) { this.id = id; }
}
@@ -1,23 +0,0 @@ (deleted: Grant.java)

package eu.dnetlib.dhp.common.api.zenodo;

import java.io.Serializable;

public class Grant implements Serializable {
	private String id;

	public String getId() { return id; }
	public void setId(String id) { this.id = id; }

	public static Grant newInstance(String id) {
		Grant g = new Grant();
		g.id = id;

		return g;
	}
}
@@ -1,92 +0,0 @@ (deleted: Links.java)

package eu.dnetlib.dhp.common.api.zenodo;

import java.io.Serializable;

public class Links implements Serializable {

	private String bucket;
	private String discard;
	private String edit;
	private String files;
	private String html;
	private String latest_draft;
	private String latest_draft_html;
	private String publish;
	private String self;

	public String getBucket() { return bucket; }
	public void setBucket(String bucket) { this.bucket = bucket; }
	public String getDiscard() { return discard; }
	public void setDiscard(String discard) { this.discard = discard; }
	public String getEdit() { return edit; }
	public void setEdit(String edit) { this.edit = edit; }
	public String getFiles() { return files; }
	public void setFiles(String files) { this.files = files; }
	public String getHtml() { return html; }
	public void setHtml(String html) { this.html = html; }
	public String getLatest_draft() { return latest_draft; }
	public void setLatest_draft(String latest_draft) { this.latest_draft = latest_draft; }
	public String getLatest_draft_html() { return latest_draft_html; }
	public void setLatest_draft_html(String latest_draft_html) { this.latest_draft_html = latest_draft_html; }
	public String getPublish() { return publish; }
	public void setPublish(String publish) { this.publish = publish; }
	public String getSelf() { return self; }
	public void setSelf(String self) { this.self = self; }
}
@@ -1,153 +0,0 @@ (deleted: Metadata.java)

package eu.dnetlib.dhp.common.api.zenodo;

import java.io.Serializable;
import java.util.List;

public class Metadata implements Serializable {

	private String access_right;
	private List<Community> communities;
	private List<Creator> creators;
	private String description;
	private String doi;
	private List<Grant> grants;
	private List<String> keywords;
	private String language;
	private String license;
	private PrereserveDoi prereserve_doi;
	private String publication_date;
	private List<String> references;
	private List<RelatedIdentifier> related_identifiers;
	private String title;
	private String upload_type;
	private String version;

	public String getUpload_type() { return upload_type; }
	public void setUpload_type(String upload_type) { this.upload_type = upload_type; }
	public String getVersion() { return version; }
	public void setVersion(String version) { this.version = version; }
	public String getAccess_right() { return access_right; }
	public void setAccess_right(String access_right) { this.access_right = access_right; }
	public List<Community> getCommunities() { return communities; }
	public void setCommunities(List<Community> communities) { this.communities = communities; }
	public List<Creator> getCreators() { return creators; }
	public void setCreators(List<Creator> creators) { this.creators = creators; }
	public String getDescription() { return description; }
	public void setDescription(String description) { this.description = description; }
	public String getDoi() { return doi; }
	public void setDoi(String doi) { this.doi = doi; }
	public List<Grant> getGrants() { return grants; }
	public void setGrants(List<Grant> grants) { this.grants = grants; }
	public List<String> getKeywords() { return keywords; }
	public void setKeywords(List<String> keywords) { this.keywords = keywords; }
	public String getLanguage() { return language; }
	public void setLanguage(String language) { this.language = language; }
	public String getLicense() { return license; }
	public void setLicense(String license) { this.license = license; }
	public PrereserveDoi getPrereserve_doi() { return prereserve_doi; }
	public void setPrereserve_doi(PrereserveDoi prereserve_doi) { this.prereserve_doi = prereserve_doi; }
	public String getPublication_date() { return publication_date; }
	public void setPublication_date(String publication_date) { this.publication_date = publication_date; }
	public List<String> getReferences() { return references; }
	public void setReferences(List<String> references) { this.references = references; }
	public List<RelatedIdentifier> getRelated_identifiers() { return related_identifiers; }
	public void setRelated_identifiers(List<RelatedIdentifier> related_identifiers) { this.related_identifiers = related_identifiers; }
	public String getTitle() { return title; }
	public void setTitle(String title) { this.title = title; }
}
@@ -1,25 +0,0 @@ (deleted: PrereserveDoi.java)

package eu.dnetlib.dhp.common.api.zenodo;

import java.io.Serializable;

public class PrereserveDoi implements Serializable {
	private String doi;
	private String recid;

	public String getDoi() { return doi; }
	public void setDoi(String doi) { this.doi = doi; }
	public String getRecid() { return recid; }
	public void setRecid(String recid) { this.recid = recid; }
}
@@ -1,43 +0,0 @@ (deleted: RelatedIdentifier.java)

package eu.dnetlib.dhp.common.api.zenodo;

import java.io.Serializable;

public class RelatedIdentifier implements Serializable {
	private String identifier;
	private String relation;
	private String resource_type;
	private String scheme;

	public String getIdentifier() { return identifier; }
	public void setIdentifier(String identifier) { this.identifier = identifier; }
	public String getRelation() { return relation; }
	public void setRelation(String relation) { this.relation = relation; }
	public String getResource_type() { return resource_type; }
	public void setResource_type(String resource_type) { this.resource_type = resource_type; }
	public String getScheme() { return scheme; }
	public void setScheme(String scheme) { this.scheme = scheme; }
}
@@ -1,118 +0,0 @@ (deleted: ZenodoModel.java)

package eu.dnetlib.dhp.common.api.zenodo;

import java.io.Serializable;
import java.util.List;

public class ZenodoModel implements Serializable {

	private String conceptrecid;
	private String created;
	private List<File> files;
	private String id;
	private Links links;
	private Metadata metadata;
	private String modified;
	private String owner;
	private String record_id;
	private String state;
	private boolean submitted;
	private String title;

	public String getConceptrecid() { return conceptrecid; }
	public void setConceptrecid(String conceptrecid) { this.conceptrecid = conceptrecid; }
	public String getCreated() { return created; }
	public void setCreated(String created) { this.created = created; }
	public List<File> getFiles() { return files; }
	public void setFiles(List<File> files) { this.files = files; }
	public String getId() { return id; }
	public void setId(String id) { this.id = id; }
	public Links getLinks() { return links; }
	public void setLinks(Links links) { this.links = links; }
	public Metadata getMetadata() { return metadata; }
	public void setMetadata(Metadata metadata) { this.metadata = metadata; }
	public String getModified() { return modified; }
	public void setModified(String modified) { this.modified = modified; }
	public String getOwner() { return owner; }
	public void setOwner(String owner) { this.owner = owner; }
	public String getRecord_id() { return record_id; }
	public void setRecord_id(String record_id) { this.record_id = record_id; }
	public String getState() { return state; }
	public void setState(String state) { this.state = state; }
	public boolean isSubmitted() { return submitted; }
	public void setSubmitted(boolean submitted) { this.submitted = submitted; }
	public String getTitle() { return title; }
	public void setTitle(String title) { this.title = title; }
}
@@ -1,7 +0,0 @@ (deleted: ZenodoModelList.java)

package eu.dnetlib.dhp.common.api.zenodo;

import java.util.ArrayList;

public class ZenodoModelList extends ArrayList<ZenodoModel> {
}
@@ -119,7 +119,7 @@ public class GraphCleaningFunctions extends CleaningFunctions {
 						.getContext()
 						.stream()
 						.filter(c -> !StringUtils.startsWith(c.getId().toLowerCase(), contextId))
-						.collect(Collectors.toList()));
+						.collect(Collectors.toCollection(ArrayList::new)));
 			}
 			return (T) res;
 		} else {

@@ -1003,4 +1003,41 @@ public class GraphCleaningFunctions extends CleaningFunctions {
 			.orElse(null);
 	}

+	/**
+	 * Implements bad and ugly things that we should get rid of ASAP.
+	 *
+	 * @param value
+	 * @return
+	 * @param <T>
+	 */
+	public static <T extends Oaf> T dedicatedUglyHacks(T value) {
+		if (value instanceof OafEntity) {
+			if (value instanceof Result) {
+				final Result r = (Result) value;
+
+				// Fix for AMS Acta
+				Optional
+					.ofNullable(r.getInstance())
+					.map(
+						instance -> instance
+							.stream()
+							.filter(
+								i -> Optional
+									.ofNullable(i.getHostedby())
+									.map(KeyValue::getKey)
+									.map(dsId -> dsId.equals("10|re3data_____::4cc76bed7ce2fb95fd8e7a2dfde16016"))
+									.orElse(false)))
+					.ifPresent(instance -> instance.forEach(i -> {
+						if (Optional
+							.ofNullable(i.getPid())
+							.map(pid -> pid.stream().noneMatch(p -> p.getValue().startsWith("10.6092/unibo/amsacta")))
+							.orElse(false)) {
+							i.setHostedby(UNKNOWN_REPOSITORY);
+						}
+					}));
+			}
+		}
+		return value;
+	}
+
 }
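A side note on the collector swap in the first hunk (my illustration, not part of the patch; the class name `CollectorSketch` is made up): `Collectors.toList()` gives no guarantee about the concrete type or mutability of the returned list, while `Collectors.toCollection(ArrayList::new)` always produces a mutable `java.util.ArrayList`, which can matter when the cleaned list is modified or serialized later.

```java
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class CollectorSketch {
	public static void main(String[] args) {
		// toList() makes no promise about the implementation class of the result
		List<String> generic = Stream.of("a", "b").collect(Collectors.toList());

		// toCollection(ArrayList::new) always yields a mutable ArrayList
		ArrayList<String> mutable = Stream.of("a", "b").collect(Collectors.toCollection(ArrayList::new));
		mutable.add("c"); // guaranteed to work

		System.out.println(generic.getClass() + " vs " + mutable.getClass());
	}
}
```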
@@ -0,0 +1,106 @@ (new file: MergeEntitiesComparator.java)

package eu.dnetlib.dhp.schema.oaf.utils;

import java.util.*;

import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.oaf.Oaf;
import eu.dnetlib.dhp.schema.oaf.OafEntity;
import eu.dnetlib.dhp.schema.oaf.Result;

public class MergeEntitiesComparator implements Comparator<Oaf> {
	static final List<String> PID_AUTHORITIES = Arrays
		.asList(
			ModelConstants.ARXIV_ID,
			ModelConstants.PUBMED_CENTRAL_ID,
			ModelConstants.EUROPE_PUBMED_CENTRAL_ID,
			ModelConstants.DATACITE_ID,
			ModelConstants.CROSSREF_ID);

	static final List<String> RESULT_TYPES = Arrays
		.asList(
			ModelConstants.ORP_RESULTTYPE_CLASSID,
			ModelConstants.SOFTWARE_RESULTTYPE_CLASSID,
			ModelConstants.DATASET_RESULTTYPE_CLASSID,
			ModelConstants.PUBLICATION_RESULTTYPE_CLASSID);

	public static final Comparator<Oaf> INSTANCE = new MergeEntitiesComparator();

	@Override
	public int compare(Oaf left, Oaf right) {
		if (left == null && right == null)
			return 0;
		if (left == null)
			return -1;
		if (right == null)
			return 1;

		int res = 0;

		// pid authority
		int cfp1 = Optional
			.ofNullable(left.getCollectedfrom())
			.map(
				cf -> cf
					.stream()
					.map(kv -> PID_AUTHORITIES.indexOf(kv.getKey()))
					.max(Integer::compare)
					.orElse(-1))
			.orElse(-1);
		int cfp2 = Optional
			.ofNullable(right.getCollectedfrom())
			.map(
				cf -> cf
					.stream()
					.map(kv -> PID_AUTHORITIES.indexOf(kv.getKey()))
					.max(Integer::compare)
					.orElse(-1))
			.orElse(-1);

		if (cfp1 >= 0 && cfp1 > cfp2) {
			return 1;
		} else if (cfp2 >= 0 && cfp2 > cfp1) {
			return -1;
		}

		// trust
		if (left.getDataInfo() != null && right.getDataInfo() != null) {
			res = left.getDataInfo().getTrust().compareTo(right.getDataInfo().getTrust());
		}

		// result type
		if (res == 0) {
			if (left instanceof Result && right instanceof Result) {
				Result r1 = (Result) left;
				Result r2 = (Result) right;

				if (r1.getResulttype() == null || r1.getResulttype().getClassid() == null) {
					if (r2.getResulttype() != null && r2.getResulttype().getClassid() != null) {
						return -1;
					}
				} else if (r2.getResulttype() == null || r2.getResulttype().getClassid() == null) {
					return 1;
				}

				int rt1 = RESULT_TYPES.indexOf(r1.getResulttype().getClassid());
				int rt2 = RESULT_TYPES.indexOf(r2.getResulttype().getClassid());

				if (rt1 >= 0 && rt1 > rt2) {
					return 1;
				} else if (rt2 >= 0 && rt2 > rt1) {
					return -1;
				}
			}
		}

		// id
		if (res == 0) {
			if (left instanceof OafEntity && right instanceof OafEntity) {
				res = ((OafEntity) left).getId().compareTo(((OafEntity) right).getId());
			}
		}

		return res;
	}

}
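A small usage sketch for the new comparator, mirroring how the next hunk applies it in MergeUtils (the method name `pickBase` and the surrounding class are my own illustration): entities are collected into a list and sorted in reverse order, so that the record with the strongest collectedfrom authority, trust, result type and, as a tie-breaker, identifier comes first and can serve as the base of the merge.

```java
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import eu.dnetlib.dhp.schema.oaf.Oaf;
import eu.dnetlib.dhp.schema.oaf.utils.MergeEntitiesComparator;

public class MergeOrderSketch {

	// Sketch only: in the real merge job the iterator comes from a grouped dataset.
	static Oaf pickBase(Iterator<Oaf> entities) {
		List<Oaf> sorted = new ArrayList<>();
		entities.forEachRemaining(sorted::add);

		// reversed() puts the highest-priority entity first, so it becomes
		// the record the remaining ones are merged into.
		sorted.sort(MergeEntitiesComparator.INSTANCE.reversed());
		return sorted.get(0);
	}
}
```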
@ -40,27 +40,12 @@ public class MergeUtils {
|
||||||
|
|
||||||
public static <T extends Oaf> T mergeGroup(String s, Iterator<T> oafEntityIterator,
|
public static <T extends Oaf> T mergeGroup(String s, Iterator<T> oafEntityIterator,
|
||||||
boolean checkDelegateAuthority) {
|
boolean checkDelegateAuthority) {
|
||||||
TreeSet<T> sortedEntities = new TreeSet<>((o1, o2) -> {
|
|
||||||
int res = 0;
|
|
||||||
|
|
||||||
if (o1.getDataInfo() != null && o2.getDataInfo() != null) {
|
ArrayList<T> sortedEntities = new ArrayList<>();
|
||||||
res = o1.getDataInfo().getTrust().compareTo(o2.getDataInfo().getTrust());
|
oafEntityIterator.forEachRemaining(sortedEntities::add);
|
||||||
}
|
sortedEntities.sort(MergeEntitiesComparator.INSTANCE.reversed());
|
||||||
|
|
||||||
if (res == 0) {
|
Iterator<T> it = sortedEntities.iterator();
|
||||||
if (o1 instanceof Result && o2 instanceof Result) {
|
|
||||||
return ResultTypeComparator.INSTANCE.compare((Result) o1, (Result) o2);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return res;
|
|
||||||
});
|
|
||||||
|
|
||||||
while (oafEntityIterator.hasNext()) {
|
|
||||||
sortedEntities.add(oafEntityIterator.next());
|
|
||||||
}
|
|
||||||
|
|
||||||
Iterator<T> it = sortedEntities.descendingIterator();
|
|
||||||
T merged = it.next();
|
T merged = it.next();
|
||||||
|
|
||||||
while (it.hasNext()) {
|
while (it.hasNext()) {
|
||||||
|
@ -143,7 +128,7 @@ public class MergeUtils {
|
||||||
* https://graph.openaire.eu/docs/data-model/pids-and-identifiers#delegated-authorities and in that case it prefers
|
* https://graph.openaire.eu/docs/data-model/pids-and-identifiers#delegated-authorities and in that case it prefers
|
||||||
* such version.
|
* such version.
|
||||||
* <p>
|
* <p>
|
||||||
* Otherwise, it considers a resulttype priority order implemented in {@link ResultTypeComparator}
|
* Otherwise, it considers a resulttype priority order implemented in {@link MergeEntitiesComparator}
|
||||||
* and proceeds with the canonical property merging.
|
* and proceeds with the canonical property merging.
|
||||||
*
|
*
|
||||||
* @param left
|
* @param left
|
||||||
|
@ -161,8 +146,9 @@ public class MergeUtils {
|
||||||
if (!leftFromDelegatedAuthority && rightFromDelegatedAuthority) {
|
if (!leftFromDelegatedAuthority && rightFromDelegatedAuthority) {
|
||||||
return right;
|
return right;
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: raise trust to have preferred fields from one or the other??
|
// TODO: raise trust to have preferred fields from one or the other??
|
||||||
if (new ResultTypeComparator().compare(left, right) < 0) {
|
if (MergeEntitiesComparator.INSTANCE.compare(left, right) > 0) {
|
||||||
return mergeResultFields(left, right);
|
return mergeResultFields(left, right);
|
||||||
} else {
|
} else {
|
||||||
return mergeResultFields(right, left);
|
return mergeResultFields(right, left);
|
||||||
|
@ -225,9 +211,9 @@ public class MergeUtils {
|
||||||
|
|
||||||
private static <T, K> List<T> mergeLists(final List<T> left, final List<T> right, int trust,
|
private static <T, K> List<T> mergeLists(final List<T> left, final List<T> right, int trust,
|
||||||
Function<T, K> keyExtractor, BinaryOperator<T> merger) {
|
Function<T, K> keyExtractor, BinaryOperator<T> merger) {
|
||||||
if (left == null) {
|
 		if (left == null || left.isEmpty()) {
-			return right;
+			return right != null ? right : new ArrayList<>();
-		} else if (right == null) {
+		} else if (right == null || right.isEmpty()) {
 			return left;
 		}

@@ -342,7 +328,7 @@ public class MergeUtils {
 		final T merged = mergeOafFields(original, enrich, trust);

 		merged.setOriginalId(unionDistinctListOfString(merged.getOriginalId(), enrich.getOriginalId()));
-		merged.setPid(unionDistinctLists(merged.getPid(), enrich.getPid(), trust));
+		merged.setPid(mergeLists(merged.getPid(), enrich.getPid(), trust, MergeUtils::spKeyExtractor, (p1, p2) -> p1));
 		merged.setDateofcollection(LocalDateTime.now().toString());
 		merged
 			.setDateoftransformation(

@@ -405,7 +391,7 @@ public class MergeUtils {
 		}

 		// should be an instance attribute, get the first non-null value
-		merge.setLanguage(coalesce(merge.getLanguage(), enrich.getLanguage()));
+		merge.setLanguage(coalesceQualifier(merge.getLanguage(), enrich.getLanguage()));

 		// distinct countries, do not manage datainfo
 		merge.setCountry(mergeQualifiers(merge.getCountry(), enrich.getCountry(), trust));

@@ -446,7 +432,10 @@ public class MergeUtils {

 		// merge datainfo for same context id
 		merge.setContext(mergeLists(merge.getContext(), enrich.getContext(), trust, Context::getId, (r, l) -> {
-			r.getDataInfo().addAll(l.getDataInfo());
+			ArrayList<DataInfo> di = new ArrayList<>();
+			di.addAll(r.getDataInfo());
+			di.addAll(l.getDataInfo());
+			r.setDataInfo(di);
 			return r;
 		}));

@@ -575,6 +564,13 @@ public class MergeUtils {
 		return m != null ? m : e;
 	}

+	private static Qualifier coalesceQualifier(Qualifier m, Qualifier e) {
+		if (m == null || m.getClassid() == null || StringUtils.isBlank(m.getClassid())) {
+			return e;
+		}
+		return m;
+	}
+
 	private static List<Author> mergeAuthors(List<Author> author, List<Author> author1, int trust) {
 		List<List<Author>> authors = new ArrayList<>();
 		if (author != null) {

@@ -587,6 +583,10 @@ public class MergeUtils {
 	}

 	private static String instanceKeyExtractor(Instance i) {
+		// three levels of concatenating:
+		// 1. ::
+		// 2. @@
+		// 3. ||
 		return String
 			.join(
 				"::",

@@ -594,10 +594,10 @@ public class MergeUtils {
 				kvKeyExtractor(i.getCollectedfrom()),
 				qualifierKeyExtractor(i.getAccessright()),
 				qualifierKeyExtractor(i.getInstancetype()),
-				Optional.ofNullable(i.getUrl()).map(u -> String.join("::", u)).orElse(null),
+				Optional.ofNullable(i.getUrl()).map(u -> String.join("@@", u)).orElse(null),
 				Optional
 					.ofNullable(i.getPid())
-					.map(pp -> pp.stream().map(MergeUtils::spKeyExtractor).collect(Collectors.joining("::")))
+					.map(pp -> pp.stream().map(MergeUtils::spKeyExtractor).collect(Collectors.joining("@@")))
 					.orElse(null));
 	}

@@ -658,6 +658,13 @@ public class MergeUtils {
 		return d1;
 	}

+		if (StringUtils.contains(d1.getValue(), "null")) {
+			return d2;
+		}
+		if (StringUtils.contains(d2.getValue(), "null")) {
+			return d1;
+		}
+
 		return Stream
 			.of(d1, d2)
 			.min(

@@ -706,7 +713,7 @@ public class MergeUtils {
 	private static String spKeyExtractor(StructuredProperty sp) {
 		return Optional
 			.ofNullable(sp)
-			.map(s -> Joiner.on("::").join(s, qualifierKeyExtractor(s.getQualifier())))
+			.map(s -> Joiner.on("||").join(qualifierKeyExtractor(s.getQualifier()), s.getValue()))
 			.orElse(null);
 	}
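For reference, a minimal standalone sketch of the three-level key scheme the revised extractors appear to use: "::" between instance-level fields, "@@" between repeated values such as URLs and PIDs, and "||" inside a single structured property. All class names and values below are illustrative only and are not taken from the codebase.

import java.util.Arrays;
import java.util.List;

public class InstanceKeySketch {

	// level 3: qualifier key and value of one structured property, joined with "||"
	static String spKey(String qualifierKey, String value) {
		return qualifierKey + "||" + value;
	}

	public static void main(String[] args) {
		// level 2: repeated values (urls, pids) joined with "@@"
		List<String> pids = Arrays.asList(spKey("doi", "10.1000/xyz123"), spKey("pmid", "123456"));
		String pidKey = String.join("@@", pids);
		String urlKey = String.join("@@", "http://a.example/rec/1", "http://b.example/rec/1");

		// level 1: instance-level fields joined with "::"
		String instanceKey = String.join("::", "openaire::collectedfrom", "OPEN", "Article", urlKey, pidKey);
		System.out.println(instanceKey);
	}
}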
@@ -1,87 +0,0 @@
-
-package eu.dnetlib.dhp.schema.oaf.utils;
-
-import static eu.dnetlib.dhp.schema.common.ModelConstants.CROSSREF_ID;
-
-import java.util.Comparator;
-import java.util.HashSet;
-import java.util.Optional;
-import java.util.stream.Collectors;
-
-import eu.dnetlib.dhp.schema.common.ModelConstants;
-import eu.dnetlib.dhp.schema.oaf.KeyValue;
-import eu.dnetlib.dhp.schema.oaf.Result;
-
-public class ResultTypeComparator implements Comparator<Result> {
-
-	public static final ResultTypeComparator INSTANCE = new ResultTypeComparator();
-
-	@Override
-	public int compare(Result left, Result right) {
-
-		if (left == null && right == null)
-			return 0;
-		if (left == null)
-			return 1;
-		if (right == null)
-			return -1;
-
-		HashSet<String> lCf = getCollectedFromIds(left);
-		HashSet<String> rCf = getCollectedFromIds(right);
-
-		if (lCf.contains(CROSSREF_ID) && !rCf.contains(CROSSREF_ID)) {
-			return -1;
-		}
-		if (!lCf.contains(CROSSREF_ID) && rCf.contains(CROSSREF_ID)) {
-			return 1;
-		}
-
-		if (left.getResulttype() == null || left.getResulttype().getClassid() == null) {
-			if (right.getResulttype() == null || right.getResulttype().getClassid() == null) {
-				return 0;
-			}
-			return 1;
-		} else if (right.getResulttype() == null || right.getResulttype().getClassid() == null) {
-			return -1;
-		}
-
-		String lClass = left.getResulttype().getClassid();
-		String rClass = right.getResulttype().getClassid();
-
-		if (!lClass.equals(rClass)) {
-			if (lClass.equals(ModelConstants.PUBLICATION_RESULTTYPE_CLASSID))
-				return -1;
-			if (rClass.equals(ModelConstants.PUBLICATION_RESULTTYPE_CLASSID))
-				return 1;
-
-			if (lClass.equals(ModelConstants.DATASET_RESULTTYPE_CLASSID))
-				return -1;
-			if (rClass.equals(ModelConstants.DATASET_RESULTTYPE_CLASSID))
-				return 1;
-
-			if (lClass.equals(ModelConstants.SOFTWARE_RESULTTYPE_CLASSID))
-				return -1;
-			if (rClass.equals(ModelConstants.SOFTWARE_RESULTTYPE_CLASSID))
-				return 1;
-
-			if (lClass.equals(ModelConstants.ORP_RESULTTYPE_CLASSID))
-				return -1;
-			if (rClass.equals(ModelConstants.ORP_RESULTTYPE_CLASSID))
-				return 1;
-		}
-
-		// Else (but unlikely), lexicographical ordering will do.
-		return lClass.compareTo(rClass);
-	}
-
-	protected HashSet<String> getCollectedFromIds(Result left) {
-		return Optional
-			.ofNullable(left.getCollectedfrom())
-			.map(
-				cf -> cf
-					.stream()
-					.map(KeyValue::getKey)
-					.collect(Collectors.toCollection(HashSet::new)))
-			.orElse(new HashSet<>());
-	}
-}
@@ -1,109 +0,0 @@
-
-package eu.dnetlib.dhp.common.api;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-
-import org.apache.commons.io.IOUtils;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.Test;
-
-@Disabled
-class ZenodoAPIClientTest {
-
-	private final String URL_STRING = "https://sandbox.zenodo.org/api/deposit/depositions";
-	private final String ACCESS_TOKEN = "";
-
-	private final String CONCEPT_REC_ID = "657113";
-
-	private final String depositionId = "674915";
-
-	@Test
-	void testUploadOldDeposition() throws IOException, MissingConceptDoiException {
-		ZenodoAPIClient client = new ZenodoAPIClient(URL_STRING,
-			ACCESS_TOKEN);
-		Assertions.assertEquals(200, client.uploadOpenDeposition(depositionId));
-
-		File file = new File(getClass()
-			.getResource("/eu/dnetlib/dhp/common/api/COVID-19.json.gz")
-			.getPath());
-
-		InputStream is = new FileInputStream(file);
-
-		Assertions.assertEquals(200, client.uploadIS(is, "COVID-19.json.gz"));
-
-		String metadata = IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/common/api/metadata.json"));
-
-		Assertions.assertEquals(200, client.sendMretadata(metadata));
-
-		Assertions.assertEquals(202, client.publish());
-
-	}
-
-	@Test
-	void testNewDeposition() throws IOException {
-
-		ZenodoAPIClient client = new ZenodoAPIClient(URL_STRING,
-			ACCESS_TOKEN);
-		Assertions.assertEquals(201, client.newDeposition());
-
-		File file = new File(getClass()
-			.getResource("/eu/dnetlib/dhp/common/api/COVID-19.json.gz")
-			.getPath());
-
-		InputStream is = new FileInputStream(file);
-
-		Assertions.assertEquals(200, client.uploadIS(is, "COVID-19.json.gz"));
-
-		String metadata = IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/common/api/metadata.json"));
-
-		Assertions.assertEquals(200, client.sendMretadata(metadata));
-
-		Assertions.assertEquals(202, client.publish());
-
-	}
-
-	@Test
-	void testNewVersionNewName() throws IOException, MissingConceptDoiException {
-
-		ZenodoAPIClient client = new ZenodoAPIClient(URL_STRING,
-			ACCESS_TOKEN);
-
-		Assertions.assertEquals(201, client.newVersion(CONCEPT_REC_ID));
-
-		File file = new File(getClass()
-			.getResource("/eu/dnetlib/dhp/common/api/newVersion")
-			.getPath());
-
-		InputStream is = new FileInputStream(file);
-
-		Assertions.assertEquals(200, client.uploadIS(is, "newVersion_deposition"));
-
-		Assertions.assertEquals(202, client.publish());
-
-	}
-
-	@Test
-	void testNewVersionOldName() throws IOException, MissingConceptDoiException {
-
-		ZenodoAPIClient client = new ZenodoAPIClient(URL_STRING,
-			ACCESS_TOKEN);
-
-		Assertions.assertEquals(201, client.newVersion(CONCEPT_REC_ID));
-
-		File file = new File(getClass()
-			.getResource("/eu/dnetlib/dhp/common/api/newVersion2")
-			.getPath());
-
-		InputStream is = new FileInputStream(file);
-
-		Assertions.assertEquals(200, client.uploadIS(is, "newVersion_deposition"));
-
-		Assertions.assertEquals(202, client.publish());
-
-	}
-
-}
@@ -1,12 +1,6 @@

 package eu.dnetlib.pace.common;

-import com.google.common.base.Joiner;
-import com.google.common.collect.Sets;
-import com.ibm.icu.text.Transliterator;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
-
 import java.io.IOException;
 import java.io.StringWriter;
 import java.nio.charset.StandardCharsets;

@@ -15,6 +9,13 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;

+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
+
+import com.google.common.base.Joiner;
+import com.google.common.collect.Sets;
+import com.ibm.icu.text.Transliterator;
+
 /**
  * Set of common functions for the framework
  *
@@ -0,0 +1,48 @@
+
+package eu.dnetlib.pace.tree;
+
+import java.util.Map;
+
+import com.wcohen.ss.AbstractStringDistance;
+
+import eu.dnetlib.pace.config.Config;
+import eu.dnetlib.pace.tree.support.AbstractStringComparator;
+import eu.dnetlib.pace.tree.support.ComparatorClass;
+
+@ComparatorClass("countryMatch")
+public class CountryMatch extends AbstractStringComparator {
+
+	public CountryMatch(Map<String, String> params) {
+		super(params, new com.wcohen.ss.JaroWinkler());
+	}
+
+	public CountryMatch(final double weight) {
+		super(weight, new com.wcohen.ss.JaroWinkler());
+	}
+
+	protected CountryMatch(final double weight, final AbstractStringDistance ssalgo) {
+		super(weight, ssalgo);
+	}
+
+	@Override
+	public double distance(final String a, final String b, final Config conf) {
+		if (a.isEmpty() || b.isEmpty()) {
+			return -1.0; // return -1 if a field is missing
+		}
+		if (a.equalsIgnoreCase("unknown") || b.equalsIgnoreCase("unknown")) {
+			return -1.0; // return -1 if a country is UNKNOWN
+		}
+
+		return a.equals(b) ? 1.0 : 0;
+	}
+
+	@Override
+	public double getWeight() {
+		return super.weight;
+	}
+
+	@Override
+	protected double normalize(final double d) {
+		return d;
+	}
+}
@@ -336,4 +336,23 @@ public class ComparatorTest extends AbstractPaceTest {
 		System.out.println("compare = " + compare);
 	}

+	@Test
+	public void countryMatch() {
+
+		CountryMatch countryMatch = new CountryMatch(params);
+
+		double result = countryMatch.distance("UNKNOWN", "UNKNOWN", conf);
+		assertEquals(-1.0, result);
+
+		result = countryMatch.distance("CHILE", "UNKNOWN", conf);
+		assertEquals(-1.0, result);
+
+		result = countryMatch.distance("CHILE", "ITALY", conf);
+		assertEquals(0.0, result);
+
+		result = countryMatch.distance("CHILE", "CHILE", conf);
+		assertEquals(1.0, result);
+
+	}
+
 }
@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <parent>
+    <artifactId>dhp</artifactId>
+    <groupId>eu.dnetlib.dhp</groupId>
+    <version>1.2.5-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>dhp-shade-package</artifactId>
+  <description>This module create a jar of all module dependencies</description>
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-shade-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <transformers>
+                <transformer>
+                  <mainClass>eu.dnetlib.dhp.oa.dedup.SparkCreateSimRels</mainClass>
+                </transformer>
+                <transformer />
+                <transformer>
+                  <resource>META-INF/cxf/bus-extensions.txt</resource>
+                </transformer>
+              </transformers>
+              <filters>
+                <filter>
+                  <artifact>*:*</artifact>
+                  <excludes>
+                    <exclude>META-INF/maven/**</exclude>
+                    <exclude>META-INF/*.SF</exclude>
+                    <exclude>META-INF/*.DSA</exclude>
+                    <exclude>META-INF/*.RSA</exclude>
+                  </excludes>
+                </filter>
+              </filters>
+              <relocations>
+                <relocation>
+                  <pattern>com</pattern>
+                  <shadedPattern>repackaged.com.google.common</shadedPattern>
+                  <includes>
+                    <include>com.google.common.**</include>
+                  </includes>
+                </relocation>
+              </relocations>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+      <groupId>org.projectlombok</groupId>
+      <artifactId>lombok</artifactId>
+      <version>1.18.28</version>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter</artifactId>
+      <version>5.6.1</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <artifactId>junit-jupiter-api</artifactId>
+          <groupId>org.junit.jupiter</groupId>
+        </exclusion>
+        <exclusion>
+          <artifactId>junit-jupiter-params</artifactId>
+          <groupId>org.junit.jupiter</groupId>
+        </exclusion>
+        <exclusion>
+          <artifactId>junit-jupiter-engine</artifactId>
+          <groupId>org.junit.jupiter</groupId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-core</artifactId>
+      <version>3.3.3</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <artifactId>byte-buddy</artifactId>
+          <groupId>net.bytebuddy</groupId>
+        </exclusion>
+        <exclusion>
+          <artifactId>byte-buddy-agent</artifactId>
+          <groupId>net.bytebuddy</groupId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <version>3.3.3</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+  <distributionManagement>
+    <site>
+      <id>DHPSite</id>
+      <url>${dhp.site.stage.path}/dhp-common</url>
+    </site>
+  </distributionManagement>
+</project>
@@ -103,6 +103,7 @@
 --executor-memory=${sparkExecutorMemory}
 --executor-cores=${sparkExecutorCores}
 --driver-memory=${sparkDriverMemory}
+--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 --conf spark.extraListeners=${spark2ExtraListeners}
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}

@@ -156,6 +157,7 @@
 --executor-memory=${sparkExecutorMemory}
 --executor-cores=${sparkExecutorCores}
 --driver-memory=${sparkDriverMemory}
+--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 --conf spark.extraListeners=${spark2ExtraListeners}
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}

@@ -95,6 +95,7 @@
 --executor-memory=${sparkExecutorMemory}
 --executor-cores=${sparkExecutorCores}
 --driver-memory=${sparkDriverMemory}
+--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 --conf spark.extraListeners=${spark2ExtraListeners}
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}

@@ -125,6 +125,7 @@
 --executor-memory=${sparkExecutorMemory}
 --executor-cores=${sparkExecutorCores}
 --driver-memory=${sparkDriverMemory}
+--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 --conf spark.extraListeners=${spark2ExtraListeners}
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}

@@ -95,6 +95,7 @@
 --executor-memory=${sparkExecutorMemory}
 --executor-cores=${sparkExecutorCores}
 --driver-memory=${sparkDriverMemory}
+--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 --conf spark.extraListeners=${spark2ExtraListeners}
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}

@@ -103,6 +103,7 @@
 --executor-memory=${sparkExecutorMemory}
 --executor-cores=${sparkExecutorCores}
 --driver-memory=${sparkDriverMemory}
+--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 --conf spark.extraListeners=${spark2ExtraListeners}
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}

@@ -155,11 +156,12 @@
 --executor-memory=${sparkExecutorMemory}
 --executor-cores=${sparkExecutorCores}
 --driver-memory=${sparkDriverMemory}
+--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 --conf spark.extraListeners=${spark2ExtraListeners}
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
---conf spark.sql.shuffle.partitions=2560
+--conf spark.sql.shuffle.partitions=8000
 </spark-opts>
 <arg>--inputGraphTablePath</arg><arg>${workingDir}/otherresearchproduct</arg>
 <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.OtherResearchProduct</arg>

@@ -95,6 +95,7 @@
 --executor-memory=${sparkExecutorMemory}
 --executor-cores=${sparkExecutorCores}
 --driver-memory=${sparkDriverMemory}
+--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 --conf spark.extraListeners=${spark2ExtraListeners}
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}

@@ -103,11 +103,12 @@
 --executor-memory=${sparkExecutorMemory}
 --executor-cores=${sparkExecutorCores}
 --driver-memory=${sparkDriverMemory}
+--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 --conf spark.extraListeners=${spark2ExtraListeners}
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
---conf spark.sql.shuffle.partitions=7000
+--conf spark.sql.shuffle.partitions=15000
 </spark-opts>
 <arg>--inputGraphTablePath</arg><arg>${inputGraphRootPath}/publication</arg>
 <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>

@@ -156,11 +157,12 @@
 --executor-memory=${sparkExecutorMemory}
 --executor-cores=${sparkExecutorCores}
 --driver-memory=${sparkDriverMemory}
+--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 --conf spark.extraListeners=${spark2ExtraListeners}
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
---conf spark.sql.shuffle.partitions=7000
+--conf spark.sql.shuffle.partitions=15000
 </spark-opts>
 <arg>--inputGraphTablePath</arg><arg>${workingDir}/publication</arg>
 <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>

@@ -95,11 +95,12 @@
 --executor-memory=${sparkExecutorMemory}
 --executor-cores=${sparkExecutorCores}
 --driver-memory=${sparkDriverMemory}
+--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 --conf spark.extraListeners=${spark2ExtraListeners}
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
---conf spark.sql.shuffle.partitions=10000
+--conf spark.sql.shuffle.partitions=15000
 </spark-opts>
 <arg>--inputGraphTablePath</arg><arg>${inputGraphRootPath}/relation</arg>
 <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Relation</arg>

@@ -103,6 +103,7 @@
 --executor-memory=${sparkExecutorMemory}
 --executor-cores=${sparkExecutorCores}
 --driver-memory=${sparkDriverMemory}
+--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 --conf spark.extraListeners=${spark2ExtraListeners}
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}

@@ -155,11 +156,12 @@
 --executor-memory=${sparkExecutorMemory}
 --executor-cores=${sparkExecutorCores}
 --driver-memory=${sparkDriverMemory}
+--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 --conf spark.extraListeners=${spark2ExtraListeners}
 --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
---conf spark.sql.shuffle.partitions=2560
+--conf spark.sql.shuffle.partitions=4000
 </spark-opts>
 <arg>--inputGraphTablePath</arg><arg>${workingDir}/software</arg>
 <arg>--graphTableClassName</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
@@ -42,6 +42,9 @@ public class Constants {
 	public static final String NULL = "NULL";
 	public static final String NA = "N/A";

+	public static final String WEB_CRAWL_ID = "10|openaire____::fb98a192f6a055ba495ef414c330834b";
+	public static final String WEB_CRAWL_NAME = "Web Crawl";
+
 	public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

 	private Constants() {
@@ -9,6 +9,7 @@ import java.util.List;

 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.BZip2Codec;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.spark.SparkConf;

@@ -40,9 +41,9 @@ public class PrepareAffiliationRelations implements Serializable {
 	private static final Logger log = LoggerFactory.getLogger(PrepareAffiliationRelations.class);
 	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
 	private static final String ID_PREFIX = "50|doi_________::";
-	public static final String BIP_AFFILIATIONS_CLASSID = "result:organization:bipinference";
+	public static final String BIP_AFFILIATIONS_CLASSID = "result:organization:openaireinference";
-	public static final String BIP_AFFILIATIONS_CLASSNAME = "Affiliation relation inferred by BIP!";
+	public static final String BIP_AFFILIATIONS_CLASSNAME = "Affiliation relation inferred by OpenAIRE";
-	public static final String BIP_INFERENCE_PROVENANCE = "bip:affiliation:crossref";
+	public static final String BIP_INFERENCE_PROVENANCE = "openaire:affiliation";

 	public static <I extends Result> void main(String[] args) throws Exception {

@@ -70,6 +71,9 @@ public class PrepareAffiliationRelations implements Serializable {
 		final String dataciteInputPath = parser.get("dataciteInputPath");
 		log.info("dataciteInputPath: {}", dataciteInputPath);

+		final String webcrawlInputPath = parser.get("webCrawlInputPath");
+		log.info("webcrawlInputPath: {}", webcrawlInputPath);
+
 		final String outputPath = parser.get("outputPath");
 		log.info("outputPath: {}", outputPath);

@@ -101,12 +105,18 @@ public class PrepareAffiliationRelations implements Serializable {
 			JavaPairRDD<Text, Text> dataciteRelations = prepareAffiliationRelations(
 				spark, dataciteInputPath, collectedFromDatacite);

+			List<KeyValue> collectedFromWebCrawl = OafMapperUtils
+				.listKeyValues(Constants.WEB_CRAWL_ID, Constants.WEB_CRAWL_NAME);
+			JavaPairRDD<Text, Text> webCrawlRelations = prepareAffiliationRelations(
+				spark, webcrawlInputPath, collectedFromWebCrawl);
+
 			crossrefRelations
 				.union(pubmedRelations)
 				.union(openAPCRelations)
 				.union(dataciteRelations)
+				.union(webCrawlRelations)
 				.saveAsHadoopFile(
-					outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, GzipCodec.class);
+					outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, BZip2Codec.class);

 		});
 	}
@@ -115,19 +115,7 @@ public class PrepareFOSSparkJob implements Serializable {
 				.forEach(
 					l -> add(sbjs, getSubject(l, FOS_CLASS_ID, FOS_CLASS_NAME, UPDATE_SUBJECT_FOS_CLASS_ID, true)));
 			r.setSubject(sbjs);
-			r
-				.setDataInfo(
-					OafMapperUtils
-						.dataInfo(
-							false, null, true,
-							false,
-							OafMapperUtils
-								.qualifier(
-									ModelConstants.PROVENANCE_ENRICH,
-									null,
-									ModelConstants.DNET_PROVENANCE_ACTIONS,
-									ModelConstants.DNET_PROVENANCE_ACTIONS),
-							null));
 			return r;
 		}
@@ -81,19 +81,7 @@ public class PrepareSDGSparkJob implements Serializable {
 				s -> sbjs
 					.add(getSubject(s.getSbj(), SDG_CLASS_ID, SDG_CLASS_NAME, UPDATE_SUBJECT_SDG_CLASS_ID)));
 			r.setSubject(sbjs);
-			r
-				.setDataInfo(
-					OafMapperUtils
-						.dataInfo(
-							false, null, true,
-							false,
-							OafMapperUtils
-								.qualifier(
-									ModelConstants.PROVENANCE_ENRICH,
-									null,
-									ModelConstants.DNET_PROVENANCE_ACTIONS,
-									ModelConstants.DNET_PROVENANCE_ACTIONS),
-							null));
 			return r;
 		}, Encoders.bean(Result.class))
 		.write()
@@ -12,6 +12,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.spark.SparkConf;
+import org.apache.spark.api.java.function.FilterFunction;
 import org.apache.spark.api.java.function.FlatMapFunction;
 import org.apache.spark.sql.*;
 import org.apache.spark.sql.types.StructType;

@@ -20,6 +21,7 @@ import org.slf4j.LoggerFactory;

 import com.fasterxml.jackson.databind.ObjectMapper;

+import eu.dnetlib.dhp.actionmanager.Constants;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.schema.action.AtomicAction;
 import eu.dnetlib.dhp.schema.common.ModelConstants;

@@ -28,6 +30,7 @@ import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
 import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
 import eu.dnetlib.dhp.schema.oaf.utils.PidCleaner;
 import eu.dnetlib.dhp.schema.oaf.utils.PidType;
+import io.netty.util.Constant;
 import scala.Tuple2;

 /**

@@ -43,8 +46,7 @@ public class CreateActionSetFromWebEntries implements Serializable {
 	private static final String PMID_PREFIX = "50|pmid________::";

 	private static final String PMCID_PREFIX = "50|pmc_________::";
-	private static final String WEB_CRAWL_ID = "10|openaire____::fb98a192f6a055ba495ef414c330834b";
-	private static final String WEB_CRAWL_NAME = "Web Crawl";
 	public static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

 	public static void main(String[] args) throws Exception {

@@ -70,6 +72,9 @@ public class CreateActionSetFromWebEntries implements Serializable {
 		final String outputPath = parser.get("outputPath");
 		log.info("outputPath: {}", outputPath);

+		final String blackListInputPath = parser.get("blackListPath");
+		log.info("blackListInputPath: {}", blackListInputPath);
+
 		SparkConf conf = new SparkConf();

 		runWithSparkSession(

@@ -77,29 +82,35 @@ public class CreateActionSetFromWebEntries implements Serializable {
 			isSparkSessionManaged,
 			spark -> {

-				createActionSet(spark, inputPath, outputPath);
+				createActionSet(spark, inputPath, outputPath, blackListInputPath);

 			});
 	}

 	public static void createActionSet(SparkSession spark, String inputPath,
-		String outputPath) {
+		String outputPath, String blackListInputPath) {

 		final Dataset<Row> dataset = readWebCrawl(spark, inputPath)
-			.filter("publication_year <= 2020 or country_code=='IE'")
+			.filter("country_code=='IE'")
 			.drop("publication_year");

-		dataset.flatMap((FlatMapFunction<Row, Relation>) row -> {
-			List<Relation> ret = new ArrayList<>();
-			final String ror = ROR_PREFIX
-				+ IdentifierFactory.md5(PidCleaner.normalizePidValue("ROR", row.getAs("ror")));
-			ret.addAll(createAffiliationRelationPairDOI(row.getAs("doi"), ror));
-			ret.addAll(createAffiliationRelationPairPMID(row.getAs("pmid"), ror));
-			ret.addAll(createAffiliationRelationPairPMCID(row.getAs("pmcid"), ror));
-
-			return ret
-				.iterator();
-		}, Encoders.bean(Relation.class))
+		final Dataset<Row> blackList = readBlackList(spark, blackListInputPath);
+
+		dataset
+			.join(blackList, dataset.col("id").equalTo(blackList.col("OpenAlexId")), "left")
+			.filter((FilterFunction<Row>) r -> r.getAs("OpenAlexId") == null)
+			.drop("OpenAlexId")
+			.flatMap((FlatMapFunction<Row, Relation>) row -> {
+				List<Relation> ret = new ArrayList<>();
+				final String ror = ROR_PREFIX
+					+ IdentifierFactory.md5(PidCleaner.normalizePidValue("ROR", row.getAs("ror")));
+				ret.addAll(createAffiliationRelationPairDOI(row.getAs("doi"), ror));
+				// ret.addAll(createAffiliationRelationPairPMID(row.getAs("pmid"), ror));
+				// ret.addAll(createAffiliationRelationPairPMCID(row.getAs("pmcid"), ror));
+
+				return ret
+					.iterator();
+			}, Encoders.bean(Relation.class))
 			.toJavaRDD()
 			.map(p -> new AtomicAction(p.getClass(), p))
 			.mapToPair(

@@ -129,11 +140,26 @@ public class CreateActionSetFromWebEntries implements Serializable {
 					"institution", functions
 						.explode(
 							functions.col("institutions")))

 			.selectExpr(
-				"id", "doi", "ids.pmcid as pmcid", "ids.pmid as pmid", "institution.ror as ror",
+				"id", "doi", "institution.ror as ror",
 				"institution.country_code as country_code", "publication_year")
 			.distinct();
+
+		// .selectExpr(
+		// "id", "doi", "ids.pmcid as pmcid", "ids.pmid as pmid", "institution.ror as ror",
+		// "institution.country_code as country_code", "publication_year")
+		// .distinct();
+
+	}
+
+	private static Dataset<Row> readBlackList(SparkSession spark, String inputPath) {
+
+		return spark
+			.read()
+			.option("header", true)
+			.csv(inputPath)
+			.select("OpenAlexId");
 	}

 	private static List<Relation> createAffiliationRelationPairPMCID(String pmcid, String ror) {

@@ -195,7 +221,7 @@ public class CreateActionSetFromWebEntries implements Serializable {
 				ModelConstants.IS_AUTHOR_INSTITUTION_OF,
 				Arrays
 					.asList(
-						OafMapperUtils.keyValue(WEB_CRAWL_ID, WEB_CRAWL_NAME)),
+						OafMapperUtils.keyValue(Constants.WEB_CRAWL_ID, Constants.WEB_CRAWL_NAME)),
 				OafMapperUtils
 					.dataInfo(
 						false, null, false, false,

@@ -214,7 +240,7 @@ public class CreateActionSetFromWebEntries implements Serializable {
 				ModelConstants.HAS_AUTHOR_INSTITUTION,
 				Arrays
 					.asList(
-						OafMapperUtils.keyValue(WEB_CRAWL_ID, WEB_CRAWL_NAME)),
+						OafMapperUtils.keyValue(Constants.WEB_CRAWL_ID, Constants.WEB_CRAWL_NAME)),
 				OafMapperUtils
 					.dataInfo(
 						false, null, false, false,
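As a side note, a minimal sketch of the blacklist filtering added above: the left join on OpenAlexId followed by the null filter keeps only rows whose id has no match in the blacklist, which (assuming the same column names) is what Spark's built-in "left_anti" join type expresses directly. This is an illustrative sketch, not the project's code.

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

public class BlackListFilterSketch {

	// Keep only rows of `dataset` whose "id" does not appear in the blacklist's "OpenAlexId" column.
	// Equivalent to the left join + "OpenAlexId == null" filter + drop("OpenAlexId") used in the change above.
	static Dataset<Row> dropBlacklisted(Dataset<Row> dataset, Dataset<Row> blackList) {
		return dataset.join(blackList, dataset.col("id").equalTo(blackList.col("OpenAlexId")), "left_anti");
	}
}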
@@ -18,7 +18,11 @@ import javax.xml.transform.TransformerConfigurationException;
 import javax.xml.transform.TransformerFactory;
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamResult;
-import javax.xml.xpath.*;
+import javax.xml.xpath.XPath;
+import javax.xml.xpath.XPathConstants;
+import javax.xml.xpath.XPathExpression;
+import javax.xml.xpath.XPathExpressionException;
+import javax.xml.xpath.XPathFactory;

 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;

@@ -35,7 +39,7 @@ import eu.dnetlib.dhp.common.collection.CollectorException;
 import eu.dnetlib.dhp.common.collection.HttpClientParams;

 /**
  * log.info(...) equal to log.trace(...) in the application-logs
  * <p>
  * known bug: at resumptionType 'discover' if the (resultTotal % resultSizeValue) == 0 the collecting fails -> change the resultSizeValue
  *

@@ -47,6 +51,7 @@ public class RestIterator implements Iterator<String> {

 	private static final Logger log = LoggerFactory.getLogger(RestIterator.class);
 	public static final String UTF_8 = "UTF-8";
+	private static final int MAX_ATTEMPTS = 5;

 	private final HttpClientParams clientParams;

@@ -60,8 +65,9 @@ public class RestIterator implements Iterator<String> {
 	private final int resultSizeValue;
 	private int resumptionInt = 0; // integer resumption token (first record to harvest)
 	private int resultTotal = -1;
-	private String resumptionStr = Integer.toString(resumptionInt); // string resumption token (first record to harvest
-	// or token scanned from results)
+	private String resumptionStr = Integer.toString(this.resumptionInt); // string resumption token (first record to
+	// harvest
+	// or token scanned from results)
 	private InputStream resultStream;
 	private Transformer transformer;
 	private XPath xpath;

@@ -73,7 +79,7 @@ public class RestIterator implements Iterator<String> {
 	private final String querySize;
 	private final String authMethod;
 	private final String authToken;
-	private final Queue<String> recordQueue = new PriorityBlockingQueue<String>();
+	private final Queue<String> recordQueue = new PriorityBlockingQueue<>();
 	private int discoverResultSize = 0;
 	private int pagination = 1;
 	/*

@@ -83,8 +89,8 @@ public class RestIterator implements Iterator<String> {
 	 */
 	private final String resultOutputFormat;

-	/** RestIterator class
-	 * compatible to version 1.3.33
+	/**
+	 * RestIterator class compatible to version 1.3.33
 	 */
 	public RestIterator(
 		final HttpClientParams clientParams,

@@ -108,40 +114,42 @@ public class RestIterator implements Iterator<String> {
 		this.resumptionType = resumptionType;
 		this.resumptionParam = resumptionParam;
 		this.resultFormatValue = resultFormatValue;
-		this.resultSizeValue = Integer.valueOf(resultSizeValueStr);
+		this.resultSizeValue = Integer.parseInt(resultSizeValueStr);
 		this.queryParams = queryParams;
 		this.authMethod = authMethod;
 		this.authToken = authToken;
 		this.resultOutputFormat = resultOutputFormat;

-		queryFormat = StringUtils.isNotBlank(resultFormatParam) ? "&" + resultFormatParam + "=" + resultFormatValue
-			: "";
-		querySize = StringUtils.isNotBlank(resultSizeParam) ? "&" + resultSizeParam + "=" + resultSizeValueStr : "";
+		this.queryFormat = StringUtils.isNotBlank(resultFormatParam) ? "&" + resultFormatParam + "=" + resultFormatValue
+			: "";
+		this.querySize = StringUtils.isNotBlank(resultSizeParam) ? "&" + resultSizeParam + "=" + resultSizeValueStr
+			: "";

 		try {
 			initXmlTransformation(resultTotalXpath, resumptionXpath, entityXpath);
-		} catch (Exception e) {
+		} catch (final Exception e) {
 			throw new IllegalStateException("xml transformation init failed: " + e.getMessage());
 		}

 		initQueue();
 	}

-	private void initXmlTransformation(String resultTotalXpath, String resumptionXpath, String entityXpath)
+	private void initXmlTransformation(final String resultTotalXpath, final String resumptionXpath,
+		final String entityXpath)
 		throws TransformerConfigurationException, XPathExpressionException {
 		final TransformerFactory factory = TransformerFactory.newInstance();
-		transformer = factory.newTransformer();
-		transformer.setOutputProperty(OutputKeys.INDENT, "yes");
-		transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "3");
-		xpath = XPathFactory.newInstance().newXPath();
-		xprResultTotalPath = xpath.compile(resultTotalXpath);
-		xprResumptionPath = xpath.compile(StringUtils.isBlank(resumptionXpath) ? "/" : resumptionXpath);
-		xprEntity = xpath.compile(entityXpath);
+		this.transformer = factory.newTransformer();
+		this.transformer.setOutputProperty(OutputKeys.INDENT, "yes");
+		this.transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "3");
+		this.xpath = XPathFactory.newInstance().newXPath();
+		this.xprResultTotalPath = this.xpath.compile(resultTotalXpath);
+		this.xprResumptionPath = this.xpath.compile(StringUtils.isBlank(resumptionXpath) ? "/" : resumptionXpath);
+		this.xprEntity = this.xpath.compile(entityXpath);
 	}

 	private void initQueue() {
-		query = baseUrl + "?" + queryParams + querySize + queryFormat;
-		log.info("REST calls starting with {}", query);
+		this.query = this.baseUrl + "?" + this.queryParams + this.querySize + this.queryFormat;
+		log.info("REST calls starting with {}", this.query);
 	}

 	private void disconnect() {

@@ -154,12 +162,11 @@ public class RestIterator implements Iterator<String> {
 	 */
 	@Override
 	public boolean hasNext() {
-		if (recordQueue.isEmpty() && query.isEmpty()) {
+		if (this.recordQueue.isEmpty() && this.query.isEmpty()) {
 			disconnect();
 			return false;
-		} else {
-			return true;
 		}
+		return true;
 	}

 	/*

@@ -168,214 +175,241 @@ public class RestIterator implements Iterator<String> {
 	 */
 	@Override
 	public String next() {
-		synchronized (recordQueue) {
-			while (recordQueue.isEmpty() && !query.isEmpty()) {
+		synchronized (this.recordQueue) {
+			while (this.recordQueue.isEmpty() && !this.query.isEmpty()) {
 				try {
-					query = downloadPage(query);
-				} catch (CollectorException e) {
+					this.query = downloadPage(this.query, 0);
+				} catch (final CollectorException e) {
 					log.debug("CollectorPlugin.next()-Exception: {}", e);
 					throw new RuntimeException(e);
 				}
 			}
-			return recordQueue.poll();
+			return this.recordQueue.poll();
 		}
 	}

 	/*
-	 * download page and return nextQuery
+	 * download page and return nextQuery (with number of attempt)
 	 */
-	private String downloadPage(String query) throws CollectorException {
-		String resultJson;
-		String resultXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>";
-		String nextQuery = "";
-		String emptyXml = resultXml + "<" + JsonUtils.XML_WRAP_TAG + "></" + JsonUtils.XML_WRAP_TAG + ">";
-		Node resultNode = null;
-		NodeList nodeList = null;
-		String qUrlArgument = "";
-		int urlOldResumptionSize = 0;
-		InputStream theHttpInputStream;
-
-		// check if cursor=* is initial set otherwise add it to the queryParam URL
-		if (resumptionType.equalsIgnoreCase("deep-cursor")) {
-			log.debug("check resumptionType deep-cursor and check cursor=*?{}", query);
-			if (!query.contains("&cursor=")) {
-				query += "&cursor=*";
-			}
-		}
-
-		try {
-			log.info("requestig URL [{}]", query);
-
-			URL qUrl = new URL(query);
-			log.debug("authMethod: {}", authMethod);
-			if ("bearer".equalsIgnoreCase(this.authMethod)) {
-				log.trace("authMethod before inputStream: {}", resultXml);
-				HttpURLConnection conn = (HttpURLConnection) qUrl.openConnection();
-				conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + authToken);
-				conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.getMimeType());
-				conn.setRequestMethod("GET");
-				theHttpInputStream = conn.getInputStream();
-			} else if (BASIC.equalsIgnoreCase(this.authMethod)) {
-				log.trace("authMethod before inputStream: {}", resultXml);
-				HttpURLConnection conn = (HttpURLConnection) qUrl.openConnection();
-				conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Basic " + authToken);
-				conn.setRequestProperty(HttpHeaders.ACCEPT, ContentType.APPLICATION_XML.getMimeType());
-				conn.setRequestMethod("GET");
-				theHttpInputStream = conn.getInputStream();
-			} else {
-				theHttpInputStream = qUrl.openStream();
-			}
-
-			resultStream = theHttpInputStream;
-			if ("json".equals(resultOutputFormat)) {
-				resultJson = IOUtils.toString(resultStream, StandardCharsets.UTF_8);
-				resultXml = JsonUtils.convertToXML(resultJson);
-				resultStream = IOUtils.toInputStream(resultXml, UTF_8);
-			}
-
-			if (!(emptyXml).equalsIgnoreCase(resultXml)) {
-				resultNode = (Node) xpath.evaluate("/", new InputSource(resultStream), XPathConstants.NODE);
-				nodeList = (NodeList) xprEntity.evaluate(resultNode, XPathConstants.NODESET);
-				log.debug("nodeList.length: {}", nodeList.getLength());
-				for (int i = 0; i < nodeList.getLength(); i++) {
-					StringWriter sw = new StringWriter();
-					transformer.transform(new DOMSource(nodeList.item(i)), new StreamResult(sw));
-					String toEnqueue = sw.toString();
-					if (toEnqueue == null || StringUtils.isBlank(toEnqueue) || emptyXml.equalsIgnoreCase(toEnqueue)) {
-						log.warn("The following record resulted in empty item for the feeding queue: {}", resultXml);
-					} else {
-						recordQueue.add(sw.toString());
-					}
-				}
-			} else {
-				log.warn("resultXml is equal with emptyXml");
-			}
-
-			resumptionInt += resultSizeValue;
-
-			switch (resumptionType.toLowerCase()) {
-				case "scan": // read of resumptionToken , evaluate next results, e.g. OAI, iterate over items
-					resumptionStr = xprResumptionPath.evaluate(resultNode);
-					break;
+	private String downloadPage(String query, final int attempt) throws CollectorException {
+
+		if (attempt > MAX_ATTEMPTS) {
+			throw new CollectorException("Max Number of attempts reached, query:" + query);
+		}
+
+		if (attempt > 0) {
+			final int delay = (attempt * 5000);
+			log.debug("Attempt {} with delay {}", attempt, delay);
+			try {
+				Thread.sleep(delay);
+			} catch (final InterruptedException e) {
+				new CollectorException(e);
+			}
+		}
+
+		try {
+			String resultJson;
+			String resultXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>";
+			String nextQuery = "";
+			final String emptyXml = resultXml + "<" + JsonUtils.XML_WRAP_TAG + "></" + JsonUtils.XML_WRAP_TAG + ">";
+			Node resultNode = null;
+			NodeList nodeList = null;
+			String qUrlArgument = "";
+			int urlOldResumptionSize = 0;
+			InputStream theHttpInputStream;
+
+			// check if cursor=* is initial set otherwise add it to the queryParam URL
+			if ("deep-cursor".equalsIgnoreCase(this.resumptionType)) {
+				log.debug("check resumptionType deep-cursor and check cursor=*?{}", query);
+				if (!query.contains("&cursor=")) {
+					query += "&cursor=*";
+				}
+			}
+
+			try {
+				log.info("requesting URL [{}]", query);
+
+				final URL qUrl = new URL(query);
+				log.debug("authMethod: {}", this.authMethod);
+				if ("bearer".equalsIgnoreCase(this.authMethod)) {
+					log.trace("authMethod before inputStream: {}", resultXml);
+					final HttpURLConnection conn = (HttpURLConnection) qUrl.openConnection();
+					conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + this.authToken);
+					conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.getMimeType());
+					conn.setRequestMethod("GET");
+					theHttpInputStream = conn.getInputStream();
+				} else if (this.BASIC.equalsIgnoreCase(this.authMethod)) {
+					log.trace("authMethod before inputStream: {}", resultXml);
+					final HttpURLConnection conn = (HttpURLConnection) qUrl.openConnection();
+					conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Basic " + this.authToken);
+					conn.setRequestProperty(HttpHeaders.ACCEPT, ContentType.APPLICATION_XML.getMimeType());
conn.setRequestProperty(HttpHeaders.ACCEPT, ContentType.APPLICATION_XML.getMimeType());
|
||||||
|
conn.setRequestMethod("GET");
|
||||||
|
theHttpInputStream = conn.getInputStream();
|
||||||
|
} else {
|
||||||
|
theHttpInputStream = qUrl.openStream();
|
||||||
|
}
|
||||||
|
|
||||||
case "count": // begin at one step for all records, iterate over items
|
this.resultStream = theHttpInputStream;
|
||||||
resumptionStr = Integer.toString(resumptionInt);
|
if ("json".equals(this.resultOutputFormat)) {
|
||||||
break;
|
resultJson = IOUtils.toString(this.resultStream, StandardCharsets.UTF_8);
|
||||||
|
resultXml = JsonUtils.convertToXML(resultJson);
|
||||||
|
this.resultStream = IOUtils.toInputStream(resultXml, UTF_8);
|
||||||
|
}
|
||||||
|
|
||||||
case "discover": // size of result items unknown, iterate over items (for openDOAR - 201808)
|
if (!(emptyXml).equalsIgnoreCase(resultXml)) {
|
||||||
if (resultSizeValue < 2) {
|
resultNode = (Node) this.xpath
|
||||||
throw new CollectorException("Mode: discover, Param 'resultSizeValue' is less than 2");
|
.evaluate("/", new InputSource(this.resultStream), XPathConstants.NODE);
|
||||||
|
nodeList = (NodeList) this.xprEntity.evaluate(resultNode, XPathConstants.NODESET);
|
||||||
|
log.debug("nodeList.length: {}", nodeList.getLength());
|
||||||
|
for (int i = 0; i < nodeList.getLength(); i++) {
|
||||||
|
final StringWriter sw = new StringWriter();
|
||||||
|
this.transformer.transform(new DOMSource(nodeList.item(i)), new StreamResult(sw));
|
||||||
|
final String toEnqueue = sw.toString();
|
||||||
|
if ((toEnqueue == null) || StringUtils.isBlank(toEnqueue)
|
||||||
|
|| emptyXml.equalsIgnoreCase(toEnqueue)) {
|
||||||
|
log
|
||||||
|
.warn(
|
||||||
|
"The following record resulted in empty item for the feeding queue: {}", resultXml);
|
||||||
|
} else {
|
||||||
|
this.recordQueue.add(sw.toString());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
qUrlArgument = qUrl.getQuery();
|
} else {
|
||||||
String[] arrayQUrlArgument = qUrlArgument.split("&");
|
log.warn("resultXml is equal with emptyXml");
|
||||||
for (String arrayUrlArgStr : arrayQUrlArgument) {
|
}
|
||||||
if (arrayUrlArgStr.startsWith(resumptionParam)) {
|
|
||||||
String[] resumptionKeyValue = arrayUrlArgStr.split("=");
|
this.resumptionInt += this.resultSizeValue;
|
||||||
if (isInteger(resumptionKeyValue[1])) {
|
|
||||||
urlOldResumptionSize = Integer.parseInt(resumptionKeyValue[1]);
|
switch (this.resumptionType.toLowerCase()) {
|
||||||
log.debug("discover OldResumptionSize from Url (int): {}", urlOldResumptionSize);
|
case "scan": // read of resumptionToken , evaluate next results, e.g. OAI, iterate over items
|
||||||
} else {
|
this.resumptionStr = this.xprResumptionPath.evaluate(resultNode);
|
||||||
log.debug("discover OldResumptionSize from Url (str): {}", resumptionKeyValue[1]);
|
break;
|
||||||
|
|
||||||
|
case "count": // begin at one step for all records, iterate over items
|
||||||
|
this.resumptionStr = Integer.toString(this.resumptionInt);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case "discover": // size of result items unknown, iterate over items (for openDOAR - 201808)
|
||||||
|
if (this.resultSizeValue < 2) {
|
||||||
|
throw new CollectorException("Mode: discover, Param 'resultSizeValue' is less than 2");
|
||||||
|
}
|
||||||
|
qUrlArgument = qUrl.getQuery();
|
||||||
|
final String[] arrayQUrlArgument = qUrlArgument.split("&");
|
||||||
|
for (final String arrayUrlArgStr : arrayQUrlArgument) {
|
||||||
|
if (arrayUrlArgStr.startsWith(this.resumptionParam)) {
|
||||||
|
final String[] resumptionKeyValue = arrayUrlArgStr.split("=");
|
||||||
|
if (isInteger(resumptionKeyValue[1])) {
|
||||||
|
urlOldResumptionSize = Integer.parseInt(resumptionKeyValue[1]);
|
||||||
|
log.debug("discover OldResumptionSize from Url (int): {}", urlOldResumptionSize);
|
||||||
|
} else {
|
||||||
|
log.debug("discover OldResumptionSize from Url (str): {}", resumptionKeyValue[1]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
if (((emptyXml).equalsIgnoreCase(resultXml))
|
if (((emptyXml).equalsIgnoreCase(resultXml))
|
||||||
|| ((nodeList != null) && (nodeList.getLength() < resultSizeValue))) {
|
|| ((nodeList != null) && (nodeList.getLength() < this.resultSizeValue))) {
|
||||||
// resumptionStr = "";
|
// resumptionStr = "";
|
||||||
if (nodeList != null) {
|
if (nodeList != null) {
|
||||||
discoverResultSize += nodeList.getLength();
|
this.discoverResultSize += nodeList.getLength();
|
||||||
|
}
|
||||||
|
this.resultTotal = this.discoverResultSize;
|
||||||
|
} else {
|
||||||
|
this.resumptionStr = Integer.toString(this.resumptionInt);
|
||||||
|
this.resultTotal = this.resumptionInt + 1;
|
||||||
|
if (nodeList != null) {
|
||||||
|
this.discoverResultSize += nodeList.getLength();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
resultTotal = discoverResultSize;
|
log.info("discoverResultSize: {}", this.discoverResultSize);
|
||||||
} else {
|
break;
|
||||||
resumptionStr = Integer.toString(resumptionInt);
|
|
||||||
resultTotal = resumptionInt + 1;
|
case "pagination":
|
||||||
|
case "page": // pagination, iterate over page numbers
|
||||||
|
this.pagination += 1;
|
||||||
if (nodeList != null) {
|
if (nodeList != null) {
|
||||||
discoverResultSize += nodeList.getLength();
|
this.discoverResultSize += nodeList.getLength();
|
||||||
|
} else {
|
||||||
|
this.resultTotal = this.discoverResultSize;
|
||||||
|
this.pagination = this.discoverResultSize;
|
||||||
}
|
}
|
||||||
}
|
this.resumptionInt = this.pagination;
|
||||||
log.info("discoverResultSize: {}", discoverResultSize);
|
this.resumptionStr = Integer.toString(this.resumptionInt);
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case "pagination":
|
case "deep-cursor": // size of result items unknown, iterate over items (for supporting deep cursor
|
||||||
case "page": // pagination, iterate over page numbers
|
// in
|
||||||
pagination += 1;
|
// solr)
|
||||||
if (nodeList != null) {
|
// isn't relevant -- if (resultSizeValue < 2) {throw new CollectorServiceException("Mode:
|
||||||
discoverResultSize += nodeList.getLength();
|
// deep-cursor, Param 'resultSizeValue' is less than 2");}
|
||||||
} else {
|
|
||||||
resultTotal = discoverResultSize;
|
|
||||||
pagination = discoverResultSize;
|
|
||||||
}
|
|
||||||
resumptionInt = pagination;
|
|
||||||
resumptionStr = Integer.toString(resumptionInt);
|
|
||||||
break;
|
|
||||||
|
|
||||||
case "deep-cursor": // size of result items unknown, iterate over items (for supporting deep cursor in
|
this.resumptionStr = encodeValue(this.xprResumptionPath.evaluate(resultNode));
|
||||||
// solr)
|
this.queryParams = this.queryParams.replace("&cursor=*", "");
|
||||||
// isn't relevant -- if (resultSizeValue < 2) {throw new CollectorServiceException("Mode:
|
|
||||||
// deep-cursor, Param 'resultSizeValue' is less than 2");}
|
|
||||||
|
|
||||||
resumptionStr = encodeValue(xprResumptionPath.evaluate(resultNode));
|
// terminating if length of nodeList is 0
|
||||||
queryParams = queryParams.replace("&cursor=*", "");
|
if ((nodeList != null) && (nodeList.getLength() < this.discoverResultSize)) {
|
||||||
|
this.resumptionInt += ((nodeList.getLength() + 1) - this.resultSizeValue);
|
||||||
|
} else {
|
||||||
|
this.resumptionInt += (nodeList.getLength() - this.resultSizeValue); // subtract the
|
||||||
|
// resultSizeValue
|
||||||
|
// because the iteration is over
|
||||||
|
// real length and the
|
||||||
|
// resultSizeValue is added before
|
||||||
|
// the switch()
|
||||||
|
}
|
||||||
|
|
||||||
// terminating if length of nodeList is 0
|
this.discoverResultSize = nodeList.getLength();
|
||||||
if ((nodeList != null) && (nodeList.getLength() < discoverResultSize)) {
|
|
||||||
resumptionInt += (nodeList.getLength() + 1 - resultSizeValue);
|
|
||||||
} else {
|
|
||||||
resumptionInt += (nodeList.getLength() - resultSizeValue); // subtract the resultSizeValue
|
|
||||||
// because the iteration is over
|
|
||||||
// real length and the
|
|
||||||
// resultSizeValue is added before
|
|
||||||
// the switch()
|
|
||||||
}
|
|
||||||
|
|
||||||
discoverResultSize = nodeList.getLength();
|
log
|
||||||
|
.debug(
|
||||||
|
"downloadPage().deep-cursor: resumptionStr=" + this.resumptionStr + " ; queryParams="
|
||||||
|
+ this.queryParams + " resumptionLengthIncreased: " + this.resumptionInt);
|
||||||
|
|
||||||
log
|
break;
|
||||||
.debug(
|
|
||||||
"downloadPage().deep-cursor: resumptionStr=" + resumptionStr + " ; queryParams="
|
|
||||||
+ queryParams + " resumptionLengthIncreased: " + resumptionInt);
|
|
||||||
|
|
||||||
break;
|
default: // otherwise: abort
|
||||||
|
// resultTotal = resumptionInt;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
default: // otherwise: abort
|
} catch (final Exception e) {
|
||||||
// resultTotal = resumptionInt;
|
log.error(e.getMessage(), e);
|
||||||
break;
|
throw new IllegalStateException("collection failed: " + e.getMessage());
|
||||||
}
|
}
|
||||||
|
|
||||||
} catch (Exception e) {
|
try {
|
||||||
log.error(e.getMessage(), e);
|
if (this.resultTotal == -1) {
|
||||||
throw new IllegalStateException("collection failed: " + e.getMessage());
|
this.resultTotal = Integer.parseInt(this.xprResultTotalPath.evaluate(resultNode));
|
||||||
}
|
if ("page".equalsIgnoreCase(this.resumptionType) && !this.BASIC.equalsIgnoreCase(this.authMethod)) {
|
||||||
|
this.resultTotal += 1;
|
||||||
try {
|
} // to correct the upper bound
|
||||||
if (resultTotal == -1) {
|
log.info("resultTotal was -1 is now: " + this.resultTotal);
|
||||||
resultTotal = Integer.parseInt(xprResultTotalPath.evaluate(resultNode));
|
}
|
||||||
if (resumptionType.equalsIgnoreCase("page") && !BASIC.equalsIgnoreCase(authMethod)) {
|
} catch (final Exception e) {
|
||||||
resultTotal += 1;
|
log.error(e.getMessage(), e);
|
||||||
} // to correct the upper bound
|
throw new IllegalStateException("downloadPage resultTotal couldn't parse: " + e.getMessage());
|
||||||
log.info("resultTotal was -1 is now: " + resultTotal);
|
|
||||||
}
|
}
|
||||||
} catch (Exception e) {
|
log.debug("resultTotal: " + this.resultTotal);
|
||||||
log.error(e.getMessage(), e);
|
log.debug("resInt: " + this.resumptionInt);
|
||||||
throw new IllegalStateException("downloadPage resultTotal couldn't parse: " + e.getMessage());
|
if (this.resumptionInt <= this.resultTotal) {
|
||||||
|
nextQuery = this.baseUrl + "?" + this.queryParams + this.querySize + "&" + this.resumptionParam + "="
|
||||||
|
+ this.resumptionStr
|
||||||
|
+ this.queryFormat;
|
||||||
|
} else {
|
||||||
|
nextQuery = "";
|
||||||
|
// if (resumptionType.toLowerCase().equals("deep-cursor")) { resumptionInt -= 1; } // correct the
|
||||||
|
// resumptionInt and prevent a NullPointer Exception at mdStore
|
||||||
|
}
|
||||||
|
log.debug("nextQueryUrl: " + nextQuery);
|
||||||
|
return nextQuery;
|
||||||
|
} catch (final Throwable e) {
|
||||||
|
log.warn(e.getMessage(), e);
|
||||||
|
return downloadPage(query, attempt + 1);
|
||||||
}
|
}
|
||||||
log.debug("resultTotal: " + resultTotal);
|
|
||||||
log.debug("resInt: " + resumptionInt);
|
|
||||||
if (resumptionInt <= resultTotal) {
|
|
||||||
nextQuery = baseUrl + "?" + queryParams + querySize + "&" + resumptionParam + "=" + resumptionStr
|
|
||||||
+ queryFormat;
|
|
||||||
} else {
|
|
||||||
nextQuery = "";
|
|
||||||
// if (resumptionType.toLowerCase().equals("deep-cursor")) { resumptionInt -= 1; } // correct the
|
|
||||||
// resumptionInt and prevent a NullPointer Exception at mdStore
|
|
||||||
}
|
|
||||||
log.debug("nextQueryUrl: " + nextQuery);
|
|
||||||
return nextQuery;
|
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private boolean isInteger(String s) {
|
private boolean isInteger(final String s) {
|
||||||
boolean isValidInteger = false;
|
boolean isValidInteger = false;
|
||||||
try {
|
try {
|
||||||
Integer.parseInt(s);
|
Integer.parseInt(s);
|
||||||
|
@ -383,7 +417,7 @@ public class RestIterator implements Iterator<String> {
|
||||||
// s is a valid integer
|
// s is a valid integer
|
||||||
|
|
||||||
isValidInteger = true;
|
isValidInteger = true;
|
||||||
} catch (NumberFormatException ex) {
|
} catch (final NumberFormatException ex) {
|
||||||
// s is not an integer
|
// s is not an integer
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -391,20 +425,20 @@ public class RestIterator implements Iterator<String> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Method to encode a string value using `UTF-8` encoding scheme
|
// Method to encode a string value using `UTF-8` encoding scheme
|
||||||
private String encodeValue(String value) {
|
private String encodeValue(final String value) {
|
||||||
try {
|
try {
|
||||||
return URLEncoder.encode(value, StandardCharsets.UTF_8.toString());
|
return URLEncoder.encode(value, StandardCharsets.UTF_8.toString());
|
||||||
} catch (UnsupportedEncodingException ex) {
|
} catch (final UnsupportedEncodingException ex) {
|
||||||
throw new RuntimeException(ex.getCause());
|
throw new RuntimeException(ex.getCause());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public String getResultFormatValue() {
|
public String getResultFormatValue() {
|
||||||
return resultFormatValue;
|
return this.resultFormatValue;
|
||||||
}
|
}
|
||||||
|
|
||||||
public String getResultOutputFormat() {
|
public String getResultOutputFormat() {
|
||||||
return resultOutputFormat;
|
return this.resultOutputFormat;
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
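A minimal standalone sketch of the retry-with-delay pattern that the new downloadPage(query, attempt) follows: fail once MAX_ATTEMPTS is exceeded, otherwise sleep attempt * 5000 ms before the next request, and recurse with attempt + 1 on any Throwable. The concrete MAX_ATTEMPTS value, the callWithRetry helper and the fetchOnce callable below are illustrative assumptions, not part of the patch.

	import java.util.concurrent.Callable;

	public class RetryWithDelaySketch {

		// the patch introduces a MAX_ATTEMPTS constant on RestIterator; 5 is an assumed value
		private static final int MAX_ATTEMPTS = 5;

		// hypothetical helper mirroring the downloadPage(query, attempt) control flow
		static <T> T callWithRetry(final Callable<T> fetchOnce, final int attempt) throws Exception {
			if (attempt > MAX_ATTEMPTS) {
				throw new Exception("Max Number of attempts reached");
			}
			if (attempt > 0) {
				// linear back-off: 5s, 10s, 15s, ...
				Thread.sleep(attempt * 5000L);
			}
			try {
				return fetchOnce.call();
			} catch (final Throwable e) {
				// retry with an increased attempt counter, as downloadPage does
				return callWithRetry(fetchOnce, attempt + 1);
			}
		}

		public static void main(final String[] args) throws Exception {
			final String page = callWithRetry(() -> "record payload", 0);
			System.out.println(page);
		}
	}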
@@ -0,0 +1,134 @@
package eu.dnetlib.dhp.transformation.xslt;

import java.io.Serializable;
import net.sf.saxon.s9api.*;

import org.apache.commons.io.IOUtils;
import org.json.JSONObject;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

/**
 * This class fetches JSON from a provided link and returns
 * a Dublin Core. This functionality is particularly needed for OSF Preprints
 */

public class DataFetcher implements ExtensionFunction, Serializable {

	/**
	 * This method fetches JSON object from a given URL
	 * @param url a url in the metadata for fetching authors in JSON format
	 * @return
	 * @throws IOException
	 */
	static JSONObject getJson(URL url) throws IOException {

		String json = IOUtils.toString(url);
		return new JSONObject(json);
	}

	/**
	 * This method extracts authors from a given JSON
	 *
	 * @param jsonObject
	 * @return
	 */
	static List<String> getAuthorsFromJson(JSONObject jsonObject) {
		List<String> authors = new ArrayList<>();
		// count of authors
		int countOfAuthors = jsonObject.getJSONArray("data").length();
		for (int i = 0; i < countOfAuthors; i++) {

			authors.add(jsonObject
				.getJSONArray("data")
				.getJSONObject(i)
				.getJSONObject("embeds")
				.getJSONObject("users")
				.getJSONObject("data")
				.getJSONObject("attributes")
				.getString("full_name"));
		}
		return authors;
	}

	/**
	 * This method transforms list of authors into Dublin Core
	 * @param authors
	 * @return Dublin Core list of authors
	 */
	static List<String> transformListToDublinCore(List<String> authors) {

		List<String> dublinCoreAuthors = new ArrayList<>();
		for (String author : authors) {

			// splitting full name into first and last names according to OpenAIRE v3 guidelines at:
			// https://guidelines.openaire.eu/en/latest/literature/field_creator.html
			// “surname”, “initials” (“first name”) “prefix”.
			String[] parts = author.split(" ");
			String firstName = parts[0];
			String lastName = parts[1];
			char initialOfFirstName = firstName.charAt(0);

			dublinCoreAuthors.add(
				"<dc:creator>" + lastName + ", " + initialOfFirstName + ". (" + firstName + ")" + "</dc:creator>");
		}
		return dublinCoreAuthors;
	}

	/**
	 * This is a public method which fetches authors and transform them into Dublin Core
	 */
	public static String getAndTransformAuthors(URL url) throws IOException {
		return String.join(", ", transformListToDublinCore(getAuthorsFromJson(getJson(url))));
	}

	/**
	 * This method extracts link to fulltext from a given JSON
	 *
	 * @return
	 */
	static private String getLinkToFulltextFromJson(JSONObject jsonObject) throws MalformedURLException {

		// note: Link to JSON containing fulltextlink is in "primary_file" attribute.
		// And in the resultant JSON, “links->download” contains the URL to fulltext

		return jsonObject
			.getJSONObject("data")
			.getJSONObject("links")
			.getString("download");
	}

	/**
	 * This is a public method which fetches link to full text and returns it as a suitable format
	 */
	public static String getFullTextLinkAndTransform(URL url) throws IOException {

		return getLinkToFulltextFromJson(getJson(url));
	}

	@Override
	public QName getName() {
		return null;
	}

	@Override
	public SequenceType getResultType() {
		return null;
	}

	@Override
	public SequenceType[] getArgumentTypes() {
		return new SequenceType[0];
	}

	@Override
	public XdmValue call(XdmValue[] xdmValues) throws SaxonApiException {
		return null;
	}
}
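A rough usage sketch for the DataFetcher helpers above, assuming the sketch lives in the same package so the package-private methods are visible; the sample payload is invented but mirrors the data[i].embeds.users.data.attributes.full_name path the code actually reads.

	package eu.dnetlib.dhp.transformation.xslt;

	import java.util.List;
	import org.json.JSONObject;

	public class DataFetcherSketch {

		public static void main(final String[] args) {
			// illustrative OSF-style payload, reduced to the fields DataFetcher reads
			final String sample = "{\"data\":[{\"embeds\":{\"users\":{\"data\":"
				+ "{\"attributes\":{\"full_name\":\"Ada Lovelace\"}}}}}]}";

			final JSONObject json = new JSONObject(sample);
			final List<String> authors = DataFetcher.getAuthorsFromJson(json);
			final List<String> creators = DataFetcher.transformListToDublinCore(authors);

			// expected output: <dc:creator>Lovelace, A. (Ada)</dc:creator>
			creators.forEach(System.out::println);
		}
	}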
@@ -55,6 +55,8 @@ public class XSLTTransformationFunction implements MapFunction<MetadataRecord, M
 			processor.registerExtensionFunction(new DateCleaner());
 			processor.registerExtensionFunction(new PersonCleaner());
+
+			processor.registerExtensionFunction(new DataFetcher());

 			final XsltCompiler comp = processor.newXsltCompiler();
 			QName datasourceIDParam = new QName(DATASOURCE_ID_PARAM);
 			comp.setParameter(datasourceIDParam, new XdmAtomicValue(value.getProvenance().getDatasourceId()));
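registerExtensionFunction(new DataFetcher()) relies on the Saxon s9api ExtensionFunction contract (getName, getResultType, getArgumentTypes, call) that DataFetcher implements. A minimal callable extension, with an invented namespace and function name used purely for illustration, would look roughly like this:

	import net.sf.saxon.s9api.*;

	public class HelloExtensionSketch implements ExtensionFunction {

		@Override
		public QName getName() {
			// invented namespace/local name, for illustration only
			return new QName("http://example.org/ext", "hello");
		}

		@Override
		public SequenceType getResultType() {
			return SequenceType.makeSequenceType(ItemType.STRING, OccurrenceIndicator.ONE);
		}

		@Override
		public SequenceType[] getArgumentTypes() {
			return new SequenceType[] {
				SequenceType.makeSequenceType(ItemType.STRING, OccurrenceIndicator.ONE)
			};
		}

		@Override
		public XdmValue call(final XdmValue[] arguments) throws SaxonApiException {
			return new XdmAtomicValue("hello " + arguments[0].itemAt(0).getStringValue());
		}

		public static void main(final String[] args) {
			final Processor processor = new Processor(false);
			processor.registerExtensionFunction(new HelloExtensionSketch());
		}
	}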
@@ -28,7 +28,13 @@
     "paramLongName": "dataciteInputPath",
     "paramDescription": "the path to get the input data from Datacite",
     "paramRequired": true
-  },
+  },{
+    "paramName": "wip",
+    "paramLongName": "webCrawlInputPath",
+    "paramDescription": "the path to get the input data from Web Crawl",
+    "paramRequired": true
+  }
+  ,
   {
     "paramName": "o",
     "paramLongName": "outputPath",
@@ -35,5 +35,6 @@ crossrefInputPath=/data/bip-affiliations/crossref-data.json
 pubmedInputPath=/data/bip-affiliations/pubmed-data.json
 openapcInputPath=/data/bip-affiliations/openapc-data.json
 dataciteInputPath=/data/bip-affiliations/datacite-data.json
+webCrawlInputPath=/data/bip-affiliations/webCrawl/

 outputPath=/tmp/crossref-affiliations-output-v5
@@ -17,6 +17,10 @@
         <name>dataciteInputPath</name>
         <description>the path where to find the inferred affiliation relations from Datacite</description>
     </property>
+    <property>
+        <name>webCrawlInputPath</name>
+        <description>the path where to find the inferred affiliation relations from webCrawl</description>
+    </property>
     <property>
         <name>outputPath</name>
         <description>the path where to store the actionset</description>

@@ -112,7 +116,7 @@
             <arg>--pubmedInputPath</arg><arg>${pubmedInputPath}</arg>
             <arg>--openapcInputPath</arg><arg>${openapcInputPath}</arg>
             <arg>--dataciteInputPath</arg><arg>${dataciteInputPath}</arg>
+            <arg>--webCrawlInputPath</arg><arg>${webCrawlInputPath}</arg>
             <arg>--outputPath</arg><arg>${outputPath}</arg>
         </spark>
         <ok to="End"/>
@@ -16,5 +16,10 @@
     "paramLongName": "isSparkSessionManaged",
     "paramDescription": "the hdfs name node",
     "paramRequired": false
-  }
+  },{
+    "paramName": "bl",
+    "paramLongName": "blackListPath",
+    "paramDescription": "the working path",
+    "paramRequired": true
+  }
 ]
@@ -1,2 +1,3 @@
 sourcePath=/user/miriam.baglioni/openalex-snapshot/data/works/
 outputPath=/tmp/miriam/webcrawlComplete/
+blackListPath=/user/miriam.baglioni/openalex-blackList
@@ -45,6 +45,7 @@
             </spark-opts>
             <arg>--sourcePath</arg><arg>${sourcePath}</arg>
             <arg>--outputPath</arg><arg>${outputPath}</arg>
+            <arg>--blackListPath</arg><arg>${blackListPath}</arg>
         </spark>
         <ok to="End"/>
         <error to="Kill"/>
@@ -1,10 +1,5 @@
 [
-  {
-    "id": "100007630",
-    "uri": "http://dx.doi.org/10.13039/100007630",
-    "name": "College of Engineering and Informatics, National University of Ireland, Galway",
-    "synonym": []
-  },
   {
     "id": "100007731",
     "uri": "http://dx.doi.org/10.13039/100007731",

@@ -58,7 +53,7 @@
     "uri": "http://dx.doi.org/10.13039/100010414",
     "name": "Health Research Board",
     "synonym": [
-      "501100001590"
+      "501100001590", "501100023273"
     ]
   },
   {

@@ -85,24 +80,6 @@
     "name": "Irish College of General Practitioners",
     "synonym": []
   },
-  {
-    "id": "100012734",
-    "uri": "http://dx.doi.org/10.13039/100012734",
-    "name": "Department for Culture, Heritage and the Gaeltacht, Ireland",
-    "synonym": []
-  },
-  {
-    "id": "100012754",
-    "uri": "http://dx.doi.org/10.13039/100012754",
-    "name": "Horizon Pharma",
-    "synonym": []
-  },
-  {
-    "id": "100012891",
-    "uri": "http://dx.doi.org/10.13039/100012891",
-    "name": "Medical Research Charities Group",
-    "synonym": []
-  },
   {
     "id": "100012919",
     "uri": "http://dx.doi.org/10.13039/100012919",

@@ -233,7 +210,7 @@
     "id": "100018064",
     "uri": "http://dx.doi.org/10.13039/100018064",
     "name": "Department of Tourism, Culture, Arts, Gaeltacht, Sport and Media",
-    "synonym": []
+    "synonym": ["100012734"]
   },
   {
     "id": "100018172",

@@ -281,13 +258,13 @@
     "id": "100019637",
     "uri": "http://dx.doi.org/10.13039/100019637",
     "name": "Horizon Therapeutics",
-    "synonym": []
+    "synonym": ["100012754"]
   },
   {
     "id": "100020174",
     "uri": "http://dx.doi.org/10.13039/100020174",
     "name": "Health Research Charities Ireland",
-    "synonym": []
+    "synonym": ["100012891"]
   },
   {
     "id": "100020202",

@@ -319,12 +296,7 @@
     "name": "Centre for Ageing Research and Development in Ireland",
     "synonym": []
   },
-  {
-    "id": "501100001583",
-    "uri": "http://dx.doi.org/10.13039/501100001583",
-    "name": "Cystinosis Foundation Ireland",
-    "synonym": []
-  },
   {
     "id": "501100001584",
     "uri": "http://dx.doi.org/10.13039/501100001584",

@@ -455,13 +427,13 @@
     "id": "501100001634",
     "uri": "http://dx.doi.org/10.13039/501100001634",
     "name": "University of Galway",
-    "synonym": []
+    "synonym": ["501100019905", "100007630", "501100020570", "501100023852"]
   },
   {
     "id": "501100001635",
     "uri": "http://dx.doi.org/10.13039/501100001635",
     "name": "University of Limerick",
-    "synonym": []
+    "synonym": ["501100014531"]
   },
   {
     "id": "501100001636",

@@ -491,7 +463,7 @@
     "id": "501100002736",
     "uri": "http://dx.doi.org/10.13039/501100002736",
     "name": "Covidien",
-    "synonym": []
+    "synonym": ["501100003956"]
   },
   {
     "id": "501100002755",

@@ -521,7 +493,7 @@
     "id": "501100003037",
     "uri": "http://dx.doi.org/10.13039/501100003037",
     "name": "Elan",
-    "synonym": []
+    "synonym": ["501100021694"]
   },
   {
     "id": "501100003496",

@@ -541,12 +513,6 @@
     "name": "Irish Institute of Clinical Neuroscience",
     "synonym": []
   },
-  {
-    "id": "501100003956",
-    "uri": "http://dx.doi.org/10.13039/501100003956",
-    "name": "Aspect Medical Systems",
-    "synonym": []
-  },
   {
     "id": "501100004162",
     "uri": "http://dx.doi.org/10.13039/501100004162",

@@ -595,17 +561,11 @@
     "name": "Technological University Dublin",
     "synonym": []
   },
-  {
-    "id": "501100009269",
-    "uri": "http://dx.doi.org/10.13039/501100009269",
-    "name": "Programme of Competitive Forestry Research for Development",
-    "synonym": []
-  },
   {
     "id": "501100009315",
     "uri": "http://dx.doi.org/10.13039/501100009315",
     "name": "Cystinosis Ireland",
-    "synonym": []
+    "synonym": ["501100001583"]
   },
   {
     "id": "501100010808",

@@ -625,12 +585,6 @@
     "name": "Alimentary Health",
     "synonym": []
   },
-  {
-    "id": "501100011103",
-    "uri": "http://dx.doi.org/10.13039/501100011103",
-    "name": "Rann\u00eds",
-    "synonym": []
-  },
   {
     "id": "501100012354",
     "uri": "http://dx.doi.org/10.13039/501100012354",

@@ -679,12 +633,7 @@
     "name": "Irish Centre for High-End Computing",
     "synonym": []
   },
-  {
-    "id": "501100019905",
-    "uri": "http://dx.doi.org/10.13039/501100019905",
-    "name": "Galway University Foundation",
-    "synonym": []
-  },
   {
     "id": "501100020036",
     "uri": "http://dx.doi.org/10.13039/501100020036",

@@ -733,12 +682,6 @@
     "name": "Insight SFI Research Centre for Data Analytics",
     "synonym": []
   },
-  {
-    "id": "501100021694",
-    "uri": "http://dx.doi.org/10.13039/501100021694",
-    "name": "Elan Pharma International",
-    "synonym": []
-  },
   {
     "id": "501100021838",
     "uri": "http://dx.doi.org/10.13039/501100021838",

@@ -769,12 +712,6 @@
     "name": "Institute of Technology, Tralee",
     "synonym": []
   },
-  {
-    "id": "501100023273",
-    "uri": "http://dx.doi.org/10.13039/501100023273",
-    "name": "HRB Clinical Research Facility Galway",
-    "synonym": []
-  },
   {
     "id": "501100023378",
     "uri": "http://dx.doi.org/10.13039/501100023378",

@@ -871,12 +808,7 @@
     "name": "Energy Policy Research Centre, Economic and Social Research Institute",
     "synonym": []
   },
-  {
-    "id": "501100014531",
-    "uri": "http://dx.doi.org/10.13039/501100014531",
-    "name": "Physical Education and Sport Sciences Department, University of Limerick",
-    "synonym": []
-  },
   {
     "id": "501100014745",
     "uri": "http://dx.doi.org/10.13039/501100014745",

@@ -889,22 +821,11 @@
     "name": "ADAPT - Centre for Digital Content Technology",
     "synonym": []
   },
-  {
-    "id": "501100020570",
-    "uri": "http://dx.doi.org/10.13039/501100020570",
-    "name": "College of Medicine, Nursing and Health Sciences, National University of Ireland, Galway",
-    "synonym": []
-  },
   {
     "id": "501100020871",
     "uri": "http://dx.doi.org/10.13039/501100020871",
     "name": "Bernal Institute, University of Limerick",
     "synonym": []
-  },
-  {
-    "id": "501100023852",
-    "uri": "http://dx.doi.org/10.13039/501100023852",
-    "name": "Moore Institute for Research in the Humanities and Social Studies, University of Galway",
-    "synonym": []
   }
 ]
@@ -48,12 +48,37 @@
         <description>Used to configure the heap size for the map JVM process. Should be 80% of mapreduce.map.memory.mb.</description>
     </property>
+    <property>
+        <name>JAVA_HOME</name>
+        <value>/srv/java/openjdk-17</value>
+        <description>Used to configure the Java home location for oozie.launcher.mapreduce.map.env</description>
+    </property>
+    <property>
+        <name>JAVA_OPTS</name>
+        <value>-Dcom.sun.security.enableAIAcaIssuers=true</value>
+        <description>Used to configure the JAVA_OPTS parameter</description>
+    </property>
 </parameters>

 <global>
     <job-tracker>${jobTracker}</job-tracker>
     <name-node>${nameNode}</name-node>
+    <configuration>
+        <property>
+            <name>mapreduce.job.queuename</name>
+            <value>${queueName}</value>
+        </property>
+        <property>
+            <name>oozie.launcher.mapred.job.queue.name</name>
+            <value>${oozieLauncherQueueName}</value>
+        </property>
+        <property>
+            <name>oozie.launcher.mapreduce.map.env</name>
+            <value>JAVA_HOME=${JAVA_HOME}</value>
+        </property>
+    </configuration>
 </global>

 <start to="collection_mode"/>

@@ -99,7 +124,7 @@
     <action name="CollectionWorker">
         <java>
             <main-class>eu.dnetlib.dhp.collection.CollectorWorkerApplication</main-class>
-            <java-opts>${collection_java_xmx}</java-opts>
+            <java-opts>${JAVA_OPTS} ${collection_java_xmx}</java-opts>
             <arg>--apidescriptor</arg><arg>${apiDescription}</arg>
             <arg>--namenode</arg><arg>${nameNode}</arg>
             <arg>--workflowId</arg><arg>${workflowId}</arg>
@@ -1,4 +1,4 @@
 <workflow-app name="Transform_BioEntity_Workflow" xmlns="uri:oozie:workflow:0.5">
     <parameters>
         <property>
             <name>sourcePath</name>

@@ -8,19 +8,40 @@
             <name>database</name>
             <description>the PDB Database Working Path</description>
         </property>
         <property>
-            <name>targetPath</name>
-            <description>the Target Working dir path</description>
+            <name>mdStoreOutputId</name>
+            <description>the identifier of the cleaned MDStore</description>
+        </property>
+        <property>
+            <name>mdStoreManagerURI</name>
+            <description>the path of the cleaned mdstore</description>
         </property>
     </parameters>

-    <start to="ConvertDB"/>
+    <start to="StartTransaction"/>

     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>

+    <action name="StartTransaction">
+        <java>
+            <configuration>
+                <property>
+                    <name>oozie.launcher.mapreduce.user.classpath.first</name>
+                    <value>true</value>
+                </property>
+            </configuration>
+            <main-class>eu.dnetlib.dhp.aggregation.mdstore.MDStoreActionNode</main-class>
+            <arg>--action</arg><arg>NEW_VERSION</arg>
+            <arg>--mdStoreID</arg><arg>${mdStoreOutputId}</arg>
+            <arg>--mdStoreManagerURI</arg><arg>${mdStoreManagerURI}</arg>
+            <capture-output/>
+        </java>
+        <ok to="ConvertDB"/>
+        <error to="RollBack"/>
+    </action>
     <action name="ConvertDB">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>

@@ -41,11 +62,48 @@
             <arg>--master</arg><arg>yarn</arg>
             <arg>--dbPath</arg><arg>${sourcePath}</arg>
             <arg>--database</arg><arg>${database}</arg>
-            <arg>--targetPath</arg><arg>${targetPath}</arg>
+            <arg>--mdstoreOutputVersion</arg><arg>${wf:actionData('StartTransaction')['mdStoreVersion']}</arg>
         </spark>
-        <ok to="End"/>
-        <error to="Kill"/>
+        <ok to="CommitVersion"/>
+        <error to="RollBack"/>
     </action>
-    <end name="End"/>
+    <action name="CommitVersion">
+        <java>
+            <configuration>
+                <property>
+                    <name>oozie.launcher.mapreduce.user.classpath.first</name>
+                    <value>true</value>
+                </property>
+            </configuration>
+            <main-class>eu.dnetlib.dhp.aggregation.mdstore.MDStoreActionNode</main-class>
+            <arg>--action</arg><arg>COMMIT</arg>
+            <arg>--namenode</arg><arg>${nameNode}</arg>
+            <arg>--mdStoreVersion</arg><arg>${wf:actionData('StartTransaction')['mdStoreVersion']}</arg>
+            <arg>--mdStoreManagerURI</arg><arg>${mdStoreManagerURI}</arg>
+        </java>
+        <ok to="End"/>
+        <error to="Kill"/>
+    </action>
+
+    <action name="RollBack">
+        <java>
+            <configuration>
+                <property>
+                    <name>oozie.launcher.mapreduce.user.classpath.first</name>
+                    <value>true</value>
+                </property>
+            </configuration>
+            <main-class>eu.dnetlib.dhp.aggregation.mdstore.MDStoreActionNode</main-class>
+            <arg>--action</arg><arg>ROLLBACK</arg>
+            <arg>--mdStoreVersion</arg><arg>${wf:actionData('StartTransaction')['mdStoreVersion']}</arg>
+            <arg>--mdStoreManagerURI</arg><arg>${mdStoreManagerURI}</arg>
+        </java>
+        <ok to="Kill"/>
+        <error to="Kill"/>
+    </action>
+
+    <end name="End"/>
+
 </workflow-app>
@@ -2,5 +2,5 @@
 {"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
 {"paramName":"db", "paramLongName":"database", "paramDescription": "should be PDB or UNIPROT", "paramRequired": true},
 {"paramName":"p", "paramLongName":"dbPath", "paramDescription": "the path of the database to transform", "paramRequired": true},
-{"paramName":"t", "paramLongName":"targetPath", "paramDescription": "the OAF target path ", "paramRequired": true}
+{"paramName":"mo", "paramLongName":"mdstoreOutputVersion", "paramDescription": "the oaf path ", "paramRequired": true}
 ]
@@ -1,5 +1,20 @@
 [
-{"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
-{"paramName":"s", "paramLongName":"sourcePath","paramDescription": "the source Path", "paramRequired": true},
-{"paramName":"t", "paramLongName":"targetPath","paramDescription": "the oaf path ", "paramRequired": true}
+  {
+    "paramName": "mt",
+    "paramLongName": "master",
+    "paramDescription": "should be local or yarn",
+    "paramRequired": true
+  },
+  {
+    "paramName": "s",
+    "paramLongName": "sourcePath",
+    "paramDescription": "the source Path",
+    "paramRequired": true
+  },
+  {
+    "paramName": "mo",
+    "paramLongName": "mdstoreOutputVersion",
+    "paramDescription": "the oaf path ",
+    "paramRequired": true
+  }
 ]
@@ -9,34 +9,26 @@
         <description>the Working Path</description>
     </property>
     <property>
-        <name>targetPath</name>
-        <description>the OAF MDStore Path</description>
+        <name>mdStoreOutputId</name>
+        <description>the identifier of the cleaned MDStore</description>
     </property>
     <property>
-        <name>sparkDriverMemory</name>
-        <description>memory for driver process</description>
-    </property>
-    <property>
-        <name>sparkExecutorMemory</name>
-        <description>memory for individual executor</description>
-    </property>
-    <property>
-        <name>sparkExecutorCores</name>
-        <description>number of cores used by single executor</description>
+        <name>mdStoreManagerURI</name>
+        <description>the path of the cleaned mdstore</description>
     </property>
     <property>
         <name>resumeFrom</name>
-        <value>DownloadEBILinks</value>
+        <value>CreateEBIDataSet</value>
         <description>node to start</description>
     </property>
 </parameters>

-<start to="resume_from"/>
+<start to="StartTransaction"/>

 <decision name="resume_from">
     <switch>
         <case to="DownloadEBILinks">${wf:conf('resumeFrom') eq 'DownloadEBILinks'}</case>
-        <case to="CreateEBIDataSet">${wf:conf('resumeFrom') eq 'CreateEBIDataSet'}</case>
+        <case to="StartTransaction">${wf:conf('resumeFrom') eq 'CreateEBIDataSet'}</case>
         <default to="DownloadEBILinks"/>
     </switch>
 </decision>

@@ -77,9 +69,29 @@
         <move source="${sourcePath}/ebi_links_dataset" target="${sourcePath}/ebi_links_dataset_old"/>
         <move source="${workingPath}/links_final" target="${sourcePath}/ebi_links_dataset"/>
     </fs>
-    <ok to="CreateEBIDataSet"/>
+    <ok to="StartTransaction"/>
     <error to="Kill"/>
 </action>

+<action name="StartTransaction">
+    <java>
+        <configuration>
+            <property>
+                <name>oozie.launcher.mapreduce.user.classpath.first</name>
+                <value>true</value>
+            </property>
+        </configuration>
+        <main-class>eu.dnetlib.dhp.aggregation.mdstore.MDStoreActionNode</main-class>
+        <arg>--action</arg><arg>NEW_VERSION</arg>
+        <arg>--mdStoreID</arg><arg>${mdStoreOutputId}</arg>
+        <arg>--mdStoreManagerURI</arg><arg>${mdStoreManagerURI}</arg>
+        <capture-output/>
+    </java>
+    <ok to="CreateEBIDataSet"/>
+    <error to="RollBack"/>
+</action>
+
 <action name="CreateEBIDataSet">
     <spark xmlns="uri:oozie:spark-action:0.2">
         <master>yarn-cluster</master>

@@ -95,11 +107,49 @@
             ${sparkExtraOPT}
         </spark-opts>
         <arg>--sourcePath</arg><arg>${sourcePath}/ebi_links_dataset</arg>
-        <arg>--targetPath</arg><arg>${targetPath}</arg>
+        <arg>--mdstoreOutputVersion</arg><arg>${wf:actionData('StartTransaction')['mdStoreVersion']}</arg>
         <arg>--master</arg><arg>yarn</arg>
     </spark>
     <ok to="End"/>
     <error to="Kill"/>
 </action>

+<action name="CommitVersion">
+    <java>
+        <configuration>
+            <property>
+                <name>oozie.launcher.mapreduce.user.classpath.first</name>
+                <value>true</value>
+            </property>
+        </configuration>
+        <main-class>eu.dnetlib.dhp.aggregation.mdstore.MDStoreActionNode</main-class>
+        <arg>--action</arg><arg>COMMIT</arg>
+        <arg>--namenode</arg><arg>${nameNode}</arg>
+        <arg>--mdStoreVersion</arg><arg>${wf:actionData('StartTransaction')['mdStoreVersion']}</arg>
+        <arg>--mdStoreManagerURI</arg><arg>${mdStoreManagerURI}</arg>
+    </java>
+    <ok to="End"/>
+    <error to="Kill"/>
+</action>
+
+<action name="RollBack">
+    <java>
+        <configuration>
+            <property>
+                <name>oozie.launcher.mapreduce.user.classpath.first</name>
+                <value>true</value>
+            </property>
+        </configuration>
+        <main-class>eu.dnetlib.dhp.aggregation.mdstore.MDStoreActionNode</main-class>
+        <arg>--action</arg><arg>ROLLBACK</arg>
+        <arg>--mdStoreVersion</arg><arg>${wf:actionData('StartTransaction')['mdStoreVersion']}</arg>
+        <arg>--mdStoreManagerURI</arg><arg>${mdStoreManagerURI}</arg>
+    </java>
+    <ok to="Kill"/>
+    <error to="Kill"/>
+</action>
+
 <end name="End"/>

 </workflow-app>
@@ -79,23 +79,6 @@ object MagUtility extends Serializable {
   private val MAGCollectedFrom = keyValue(ModelConstants.MAG_ID, ModelConstants.MAG_NAME)

   private val MAGDataInfo: DataInfo = {
-    val di = new DataInfo
-    di.setDeletedbyinference(false)
-    di.setInferred(false)
-    di.setInvisible(false)
-    di.setTrust("0.9")
-    di.setProvenanceaction(
-      OafMapperUtils.qualifier(
-        ModelConstants.SYSIMPORT_ACTIONSET,
-        ModelConstants.SYSIMPORT_ACTIONSET,
-        ModelConstants.DNET_PROVENANCE_ACTIONS,
-        ModelConstants.DNET_PROVENANCE_ACTIONS
-      )
-    )
-    di
-  }
-
-  private val MAGDataInfoInvisible: DataInfo = {
     val di = new DataInfo
     di.setDeletedbyinference(false)
     di.setInferred(false)

@@ -453,7 +436,6 @@ object MagUtility extends Serializable {

       case "repository" =>
         result = new Publication()
-        result.setDataInfo(MAGDataInfoInvisible)
         qualifier(
           "0038",
           "Other literature type",

@@ -488,8 +470,7 @@ object MagUtility extends Serializable {
     }

     if (result != null) {
-      if (result.getDataInfo == null)
-        result.setDataInfo(MAGDataInfo)
+      result.setDataInfo(MAGDataInfo)
       val i = new Instance
       i.setInstancetype(tp)
       i.setInstanceTypeMapping(

@@ -512,7 +493,7 @@ object MagUtility extends Serializable {
       return null

     result.setCollectedfrom(List(MAGCollectedFrom).asJava)
-    val pidList = List(
+    var pidList = List(
       structuredProperty(
         paper.paperId.get.toString,
         qualifier(

@@ -525,8 +506,6 @@ object MagUtility extends Serializable {
       )
     )

-    result.setPid(pidList.asJava)
-
     result.setOriginalId(pidList.map(s => s.getValue).asJava)

     result.setId(s"50|mag_________::${DHPUtils.md5(paper.paperId.get.toString)}")

@@ -618,22 +597,23 @@ object MagUtility extends Serializable {
     }

     val instance = result.getInstance().get(0)
-    instance.setPid(pidList.asJava)
-    if (paper.doi.orNull != null)
-      instance.setAlternateIdentifier(
-        List(
-          structuredProperty(
-            paper.doi.get,
-            qualifier(
-              PidType.doi.toString,
-              PidType.doi.toString,
-              ModelConstants.DNET_PID_TYPES,
-              ModelConstants.DNET_PID_TYPES
-            ),
-            null
-          )
-        ).asJava
-      )
+    if (paper.doi.orNull != null) {
+      pidList = pidList ::: List(
+        structuredProperty(
+          paper.doi.get,
+          qualifier(
+            PidType.doi.toString,
+            PidType.doi.toString,
+            ModelConstants.DNET_PID_TYPES,
+            ModelConstants.DNET_PID_TYPES
+          ),
+          null
+        )
+      )
+    }
+    instance.setPid(pidList.asJava)
+    result.setPid(pidList.asJava)
     instance.setUrl(paper.urls.get.asJava)
     instance.setHostedby(ModelConstants.UNKNOWN_REPOSITORY)
     instance.setCollectedfrom(MAGCollectedFrom)
@@ -38,6 +38,7 @@ class SparkMAGtoOAF(propertyPath: String, args: Array[String], log: Logger)
     spark.read
       .load(s"$magBasePath/mag_denormalized")
       .as[MAGPaper]
+      .filter(col("doi").isNotNull)
      .map(s => MagUtility.convertMAGtoOAF(s))
      .filter(s => s != null)
      .write
@@ -231,7 +231,7 @@ object BioDBToOAF {
   def uniprotToOAF(input: String): List[Oaf] = {
     implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
     lazy val json = parse(input)
-    val pid = (json \ "pid").extract[String]
+    val pid = (json \ "pid").extract[String].trim()

     val d = new Dataset
@@ -2,12 +2,15 @@ package eu.dnetlib.dhp.sx.bio

 import eu.dnetlib.dhp.application.ArgumentApplicationParser
 import eu.dnetlib.dhp.collection.CollectionUtils
+import eu.dnetlib.dhp.common.Constants.{MDSTORE_DATA_PATH, MDSTORE_SIZE_PATH}
+import eu.dnetlib.dhp.schema.mdstore.MDStoreVersion
 import eu.dnetlib.dhp.schema.oaf.Oaf
 import eu.dnetlib.dhp.sx.bio.BioDBToOAF.ScholixResolved
 import org.apache.commons.io.IOUtils
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.{Encoder, Encoders, SparkSession}
 import org.slf4j.{Logger, LoggerFactory}
+import eu.dnetlib.dhp.utils.DHPUtils.{MAPPER, writeHdfsFile}

 object SparkTransformBioDatabaseToOAF {

@@ -25,8 +28,13 @@ object SparkTransformBioDatabaseToOAF {

    val dbPath: String = parser.get("dbPath")
    log.info("dbPath: {}", database)
-    val targetPath: String = parser.get("targetPath")
-    log.info("targetPath: {}", database)
+    val mdstoreOutputVersion = parser.get("mdstoreOutputVersion")
+    log.info("mdstoreOutputVersion: {}", mdstoreOutputVersion)
+
+    val cleanedMdStoreVersion = MAPPER.readValue(mdstoreOutputVersion, classOf[MDStoreVersion])
+    val outputBasePath = cleanedMdStoreVersion.getHdfsPath
+    log.info("outputBasePath: {}", outputBasePath)

    val spark: SparkSession =
      SparkSession
@@ -43,24 +51,28 @@ object SparkTransformBioDatabaseToOAF {
      case "UNIPROT" =>
        CollectionUtils.saveDataset(
          spark.createDataset(sc.textFile(dbPath).flatMap(i => BioDBToOAF.uniprotToOAF(i))),
-          targetPath
+          s"$outputBasePath/$MDSTORE_DATA_PATH"
        )
      case "PDB" =>
        CollectionUtils.saveDataset(
          spark.createDataset(sc.textFile(dbPath).flatMap(i => BioDBToOAF.pdbTOOaf(i))),
-          targetPath
+          s"$outputBasePath/$MDSTORE_DATA_PATH"
        )
      case "SCHOLIX" =>
        CollectionUtils.saveDataset(
          spark.read.load(dbPath).as[ScholixResolved].map(i => BioDBToOAF.scholixResolvedToOAF(i)),
-          targetPath
+          s"$outputBasePath/$MDSTORE_DATA_PATH"
        )
      case "CROSSREF_LINKS" =>
        CollectionUtils.saveDataset(
          spark.createDataset(sc.textFile(dbPath).map(i => BioDBToOAF.crossrefLinksToOaf(i))),
-          targetPath
+          s"$outputBasePath/$MDSTORE_DATA_PATH"
        )
    }

+    val df = spark.read.text(s"$outputBasePath/$MDSTORE_DATA_PATH")
+    val mdStoreSize = df.count
+    writeHdfsFile(spark.sparkContext.hadoopConfiguration, s"$mdStoreSize", s"$outputBasePath/$MDSTORE_SIZE_PATH")
  }

 }

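The change above replaces the flat targetPath argument with an MDStore output version: the workflow passes a serialized MDStoreVersion, the job writes its records under that store's data path and then records the number of written records in a size file. A minimal sketch of that pattern, assuming Jackson for the JSON parsing and writing the size file directly through the Hadoop FileSystem API (the class and method names below stand in for the project's own MDStoreVersion and writeHdfsFile utilities):

  import com.fasterxml.jackson.databind.ObjectMapper
  import org.apache.hadoop.fs.{FileSystem, Path}
  import org.apache.spark.sql.SparkSession
  import scala.beans.BeanProperty

  // Hypothetical, minimal view of an MDStore version record: only the HDFS path matters here.
  class MdStoreVersionSketch {
    @BeanProperty var hdfsPath: String = _
  }

  def finalizeMdStore(spark: SparkSession, mdstoreVersionJson: String, dataSubPath: String, sizeSubPath: String): Unit = {
    // Parse the serialized MDStore version to find where this job wrote its records.
    val basePath = new ObjectMapper().readValue(mdstoreVersionJson, classOf[MdStoreVersionSketch]).getHdfsPath

    // Count the records written under the data path and store the count in the size file,
    // mirroring what the reworked bio jobs do after CollectionUtils.saveDataset(...).
    val size = spark.read.text(s"$basePath/$dataSubPath").count()
    val fs = FileSystem.get(spark.sparkContext.hadoopConfiguration)
    val out = fs.create(new Path(s"$basePath/$sizeSubPath"), true)
    out.writeBytes(size.toString)
    out.close()
  }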
@@ -9,6 +9,9 @@ import org.apache.commons.io.IOUtils
 import org.apache.spark.SparkConf
 import org.apache.spark.sql._
 import org.slf4j.{Logger, LoggerFactory}
+import eu.dnetlib.dhp.common.Constants.{MDSTORE_DATA_PATH, MDSTORE_SIZE_PATH}
+import eu.dnetlib.dhp.schema.mdstore.MDStoreVersion
+import eu.dnetlib.dhp.utils.DHPUtils.{MAPPER, writeHdfsFile}

 object SparkEBILinksToOaf {

@@ -32,8 +35,13 @@ object SparkEBILinksToOaf {
    import spark.implicits._
    val sourcePath = parser.get("sourcePath")
    log.info(s"sourcePath -> $sourcePath")
-    val targetPath = parser.get("targetPath")
-    log.info(s"targetPath -> $targetPath")
+    val mdstoreOutputVersion = parser.get("mdstoreOutputVersion")
+    log.info("mdstoreOutputVersion: {}", mdstoreOutputVersion)
+
+    val cleanedMdStoreVersion = MAPPER.readValue(mdstoreOutputVersion, classOf[MDStoreVersion])
+    val outputBasePath = cleanedMdStoreVersion.getHdfsPath
+    log.info("outputBasePath: {}", outputBasePath)

    implicit val PMEncoder: Encoder[Oaf] = Encoders.kryo(classOf[Oaf])

    val ebLinks: Dataset[EBILinkItem] = spark.read
@@ -46,7 +54,10 @@ object SparkEBILinksToOaf {
        .flatMap(j => BioDBToOAF.parse_ebi_links(j.links))
        .filter(p => BioDBToOAF.EBITargetLinksFilter(p))
        .flatMap(p => BioDBToOAF.convertEBILinksToOaf(p)),
-      targetPath
+      s"$outputBasePath/$MDSTORE_DATA_PATH"
    )
+    val df = spark.read.text(s"$outputBasePath/$MDSTORE_DATA_PATH")
+    val mdStoreSize = df.count
+    writeHdfsFile(spark.sparkContext.hadoopConfiguration, s"$mdStoreSize", s"$outputBasePath/$MDSTORE_SIZE_PATH")
  }
 }

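SparkEBILinksToOaf follows the same MDStore convention sketched earlier: the output location now comes from the mdstoreOutputVersion argument rather than a fixed targetPath, and each run finishes by writing the record count to the MDSTORE_SIZE_PATH file so the aggregation workflow can commit the store.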
@@ -88,6 +88,7 @@ public class PrepareAffiliationRelationsTest {
                "-pubmedInputPath", crossrefAffiliationRelationPath,
                "-openapcInputPath", crossrefAffiliationRelationPath,
                "-dataciteInputPath", crossrefAffiliationRelationPath,
+                "-webCrawlInputPath", crossrefAffiliationRelationPath,
                "-outputPath", outputPath
            });

@@ -104,7 +105,7 @@ public class PrepareAffiliationRelationsTest {
        // );
        // }
        // count the number of relations
-        assertEquals(80, tmp.count());
+        assertEquals(120, tmp.count());

        Dataset<Relation> dataset = spark.createDataset(tmp.rdd(), Encoders.bean(Relation.class));
        dataset.createOrReplaceTempView("result");
@@ -115,7 +116,7 @@ public class PrepareAffiliationRelationsTest {
        // verify that we have equal number of bi-directional relations
        Assertions
            .assertEquals(
-                40, execVerification
+                60, execVerification
                    .filter(
                        "relClass='" + ModelConstants.HAS_AUTHOR_INSTITUTION + "'")
                    .collectAsList()
@@ -123,7 +124,7 @@ public class PrepareAffiliationRelationsTest {

        Assertions
            .assertEquals(
-                40, execVerification
+                60, execVerification
                    .filter(
                        "relClass='" + ModelConstants.IS_AUTHOR_INSTITUTION_OF + "'")
                    .collectAsList()

@@ -75,7 +75,11 @@ public class CreateASTest {

        String inputPath = getClass()
            .getResource(
-                "/eu/dnetlib/dhp/actionmanager/webcrawl/")
+                "/eu/dnetlib/dhp/actionmanager/webcrawl/input/")
+            .getPath();
+        String blackListPath = getClass()
+            .getResource(
+                "/eu/dnetlib/dhp/actionmanager/webcrawl/blackList/")
            .getPath();

        CreateActionSetFromWebEntries
@@ -86,7 +90,8 @@ public class CreateASTest {
                    "-sourcePath",
                    inputPath,
                    "-outputPath",
-                    workingDir.toString() + "/actionSet1"
+                    workingDir.toString() + "/actionSet1",
+                    "-blackListPath", blackListPath
                });

        final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
@@ -96,7 +101,7 @@ public class CreateASTest {
            .map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
            .map(aa -> ((Relation) aa.getPayload()));

-        Assertions.assertEquals(64, tmp.count());
+        Assertions.assertEquals(58, tmp.count());

    }

@@ -109,6 +114,10 @@ public class CreateASTest {
            .getResource(
                "/eu/dnetlib/dhp/actionmanager/webcrawl/")
            .getPath();
+        String blackListPath = getClass()
+            .getResource(
+                "/eu/dnetlib/dhp/actionmanager/webcrawl/blackList/")
+            .getPath();

        CreateActionSetFromWebEntries
            .main(
@@ -118,7 +127,8 @@ public class CreateASTest {
                    "-sourcePath",
                    inputPath,
                    "-outputPath",
-                    workingDir.toString() + "/actionSet1"
+                    workingDir.toString() + "/actionSet1",
+                    "-blackListPath", blackListPath
                });

        final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
@@ -184,7 +194,7 @@ public class CreateASTest {

        Assertions
            .assertEquals(
-                5, tmp
+                2, tmp
                    .filter(
                        r -> r
                            .getSource()
@@ -197,7 +207,7 @@ public class CreateASTest {

        Assertions
            .assertEquals(
-                5, tmp
+                2, tmp
                    .filter(
                        r -> r
                            .getTarget()
@@ -210,7 +220,7 @@ public class CreateASTest {

        Assertions
            .assertEquals(
-                2, tmp
+                1, tmp
                    .filter(
                        r -> r
                            .getTarget()
@@ -224,7 +234,7 @@ public class CreateASTest {

        Assertions
            .assertEquals(
-                2, tmp
+                1, tmp
                    .filter(
                        r -> r
                            .getTarget()
@@ -238,7 +248,7 @@ public class CreateASTest {

        Assertions
            .assertEquals(
-                1, tmp
+                0, tmp
                    .filter(
                        r -> r
                            .getTarget()

@@ -3,6 +3,7 @@ package eu.dnetlib.dhp.collection.plugin.rest;

 import java.util.HashMap;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
 import java.util.stream.Stream;

 import org.junit.jupiter.api.Assertions;
@@ -69,7 +70,7 @@ public class OsfPreprintCollectorTest {

    @Test
    @Disabled
-    void test() throws CollectorException {
+    void test_limited() throws CollectorException {
        final AtomicInteger i = new AtomicInteger(0);
        final Stream<String> stream = this.rcp.collect(this.api, new AggregatorReport());

@@ -82,4 +83,23 @@ public class OsfPreprintCollectorTest {
        log.info("{}", i.intValue());
        Assertions.assertTrue(i.intValue() > 0);
    }
+
+    @Test
+    @Disabled
+    void test_all() throws CollectorException {
+        final AtomicLong i = new AtomicLong(0);
+        final Stream<String> stream = this.rcp.collect(this.api, new AggregatorReport());
+
+        stream.forEach(s -> {
+            Assertions.assertTrue(s.length() > 0);
+            if ((i.incrementAndGet() % 1000) == 0) {
+                log.info("COLLECTED: {}", i.get());
+            }
+        });
+
+        log.info("TOTAL: {}", i.get());
+        Assertions.assertTrue(i.get() > 0);
+    }
+
 }

@@ -0,0 +1,68 @@
+package eu.dnetlib.dhp.transformation.xslt;
+
+import org.json.JSONObject;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.util.List;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+class DataFetcherTest {
+
+    @BeforeEach
+    void setUp() {
+    }
+
+    @AfterEach
+    void tearDown() {
+    }
+
+    @Test
+    void getJson() throws IOException, URISyntaxException {
+        URL contributorsUrl = new URI("https://api.osf.io/v2/preprints/mrwqb/contributors/?format=json").toURL();
+        JSONObject testJsonObj = DataFetcher.getJson(contributorsUrl);
+
+        String x = testJsonObj
+            .getJSONArray("data")
+            .getJSONObject(0)
+            .getJSONObject("embeds")
+            .getJSONObject("users")
+            .getJSONObject("data")
+            .getJSONObject("attributes")
+            .getString("full_name");
+        System.out.println(x);
+        System.out.println(testJsonObj.getJSONArray("data").length());
+        testJsonObj.getJSONArray("data").forEach(System.out::println);
+    }
+
+    @Test
+    void getAuthorsFromJson() throws IOException, URISyntaxException {
+        URL contributorsUrl = new URI("https://api.osf.io/v2/preprints/mrwqb/contributors/?format=json").toURL();
+        JSONObject testJsonObj = DataFetcher.getJson(contributorsUrl);
+        List<String> authors = DataFetcher.getAuthorsFromJson(testJsonObj);
+        System.out.println(authors);
+        System.out.println(DataFetcher.transformListToDublinCore(authors));
+    }
+
+    @Test
+    void getAndTransformAuthors() throws IOException, URISyntaxException {
+        URL contributorsUrl = new URI("https://api.osf.io/v2/preprints/mrwqb/contributors/?format=json").toURL();
+        System.out.println(DataFetcher.getAndTransformAuthors(contributorsUrl));
+    }
+
+    @Test
+    void getLinkToFulltextFromJson() throws URISyntaxException, IOException {
+        URL linkToFullTextDocument = new URI("https://api.osf.io/v2/files/5de7c96f84c479000c7928af/?format=json").toURL();
+        System.out.println(DataFetcher.getFullTextLinkAndTransform(linkToFullTextDocument));
+
+    }
+}

@@ -4,4 +4,6 @@
 {"DOI":"10.1142\/s021821650200186x","Matchings":[{"RORid":"https:\/\/ror.org\/035xkbk20","Confidence":1},{"RORid":"https:\/\/ror.org\/05apxxy63","Confidence":1}]}
 {"DOI":"10.1061\/(asce)0733-9372(2002)128:7(575)","Matchings":[{"RORid":"https:\/\/ror.org\/04j198w64","Confidence":0.82}]}
 {"DOI":"10.1061\/(asce)0733-9372(2002)128:7(588)","Matchings":[{"RORid":"https:\/\/ror.org\/03m8km719","Confidence":0.8660254038},{"RORid":"https:\/\/ror.org\/02aze4h65","Confidence":0.87}]}
 {"DOI":"10.1161\/hy0202.103001","Matchings":[{"RORid":"https:\/\/ror.org\/057xtrt18","Confidence":0.7071067812}]}
+{"DOI": "10.1080/13669877.2015.1042504", "Matchings": [{"Confidence": 1.0, "RORid": "https://ror.org/03265fv13"}]}
+{"DOI": "10.1007/3-540-47984-8_14", "Matchings": [{"Confidence": 1.0, "RORid": "https://ror.org/00a0n9e72"}]}

@@ -1,15 +1,44 @@
-{"pdb": "1CW0", "title": "crystal structure analysis of very short patch repair (vsr) endonuclease in complex with a duplex dna", "authors": ["S.E.Tsutakawa", "H.Jingami", "K.Morikawa"], "doi": "10.1016/S0092-8674(00)81550-0", "pmid": "10612397"}
-{"pdb": "2CWW", "title": "crystal structure of thermus thermophilus ttha1280, a putative sam- dependent rna methyltransferase, in complex with s-adenosyl-l- homocysteine", "authors": ["A.A.Pioszak", "K.Murayama", "N.Nakagawa", "A.Ebihara", "S.Kuramitsu", "M.Shirouzu", "S.Yokoyama", "Riken Structural Genomics/proteomics Initiative (Rsgi)"], "doi": "10.1107/S1744309105029842", "pmid": "16511182"}
-{"pdb": "6CWE", "title": "structure of alpha-gsa[8,6p] bound by cd1d and in complex with the va14vb8.2 tcr", "authors": ["J.Wang", "D.Zajonc"], "doi": null, "pmid": null}
-{"pdb": "5CWS", "title": "crystal structure of the intact chaetomium thermophilum nsp1-nup49- nup57 channel nucleoporin heterotrimer bound to its nic96 nuclear pore complex attachment site", "authors": ["C.J.Bley", "S.Petrovic", "M.Paduch", "V.Lu", "A.A.Kossiakoff", "A.Hoelz"], "doi": "10.1126/SCIENCE.AAC9176", "pmid": "26316600"}
-{"pdb": "5CWE", "title": "structure of cyp107l2 from streptomyces avermitilis with lauric acid", "authors": ["T.-V.Pham", "S.-H.Han", "J.-H.Kim", "D.-H.Kim", "L.-W.Kang"], "doi": null, "pmid": null}
-{"pdb": "7CW4", "title": "acetyl-coa acetyltransferase from bacillus cereus atcc 14579", "authors": ["J.Hong", "K.J.Kim"], "doi": "10.1016/J.BBRC.2020.09.048", "pmid": "32972748"}
-{"pdb": "2CWP", "title": "crystal structure of metrs related protein from pyrococcus horikoshii", "authors": ["K.Murayama", "M.Kato-Murayama", "M.Shirouzu", "S.Yokoyama", "Riken StructuralGenomics/proteomics Initiative (Rsgi)"], "doi": null, "pmid": null}
-{"pdb": "2CW7", "title": "crystal structure of intein homing endonuclease ii", "authors": ["H.Matsumura", "H.Takahashi", "T.Inoue", "H.Hashimoto", "M.Nishioka", "S.Fujiwara", "M.Takagi", "T.Imanaka", "Y.Kai"], "doi": "10.1002/PROT.20858", "pmid": "16493661"}
-{"pdb": "1CWU", "title": "brassica napus enoyl acp reductase a138g mutant complexed with nad+ and thienodiazaborine", "authors": ["A.Roujeinikova", "J.B.Rafferty", "D.W.Rice"], "doi": "10.1074/JBC.274.43.30811", "pmid": "10521472"}
-{"pdb": "3CWN", "title": "escherichia coli transaldolase b mutant f178y", "authors": ["T.Sandalova", "G.Schneider", "A.Samland"], "doi": "10.1074/JBC.M803184200", "pmid": "18687684"}
-{"pdb": "1CWL", "title": "human cyclophilin a complexed with 4 4-hydroxy-meleu cyclosporin", "authors": ["V.Mikol", "J.Kallen", "P.Taylor", "M.D.Walkinshaw"], "doi": "10.1006/JMBI.1998.2108", "pmid": "9769216"}
-{"pdb": "3CW2", "title": "crystal structure of the intact archaeal translation initiation factor 2 from sulfolobus solfataricus .", "authors": ["E.A.Stolboushkina", "S.V.Nikonov", "A.D.Nikulin", "U.Blaesi", "D.J.Manstein", "R.V.Fedorov", "M.B.Garber", "O.S.Nikonov"], "doi": "10.1016/J.JMB.2008.07.039", "pmid": "18675278"}
-{"pdb": "3CW9", "title": "4-chlorobenzoyl-coa ligase/synthetase in the thioester-forming conformation, bound to 4-chlorophenacyl-coa", "authors": ["A.S.Reger", "J.Cao", "R.Wu", "D.Dunaway-Mariano", "A.M.Gulick"], "doi": "10.1021/BI800696Y", "pmid": "18620418"}
-{"pdb": "3CWU", "title": "crystal structure of an alka host/guest complex 2'-fluoro-2'-deoxy-1, n6-ethenoadenine:thymine base pair", "authors": ["B.R.Bowman", "S.Lee", "S.Wang", "G.L.Verdine"], "doi": "10.1016/J.STR.2008.04.012", "pmid": "18682218"}
-{"pdb": "5CWF", "title": "crystal structure of de novo designed helical repeat protein dhr8", "authors": ["G.Bhabha", "D.C.Ekiert"], "doi": "10.1038/NATURE16162", "pmid": "26675729"}
+{"classification": "Signaling protein", "pdb": "5NM4", "deposition_date": "2017-04-05", "title": "A2a adenosine receptor room-temperature structure determined by serial Femtosecond crystallography", "Keywords": ["Oom-temperature", " serial crystallography", " signaling protein"], "authors": ["T.weinert", "R.cheng", "D.james", "D.gashi", "P.nogly", "K.jaeger", "M.hennig", "", "J.standfuss"], "pmid": "28912485", "doi": "10.1038/S41467-017-00630-4"}
+{"classification": "Oxidoreductase/oxidoreductase inhibitor", "pdb": "4KN3", "deposition_date": "2013-05-08", "title": "Structure of the y34ns91g double mutant of dehaloperoxidase from Amphitrite ornata with 2,4,6-trichlorophenol", "Keywords": ["Lobin", " oxygen storage", " peroxidase", " oxidoreductase", " oxidoreductase-", "Oxidoreductase inhibitor complex"], "authors": ["C.wang", "L.lovelace", "L.lebioda"], "pmid": "23952341", "doi": "10.1021/BI400627W"}
+{"classification": "Transport protein", "pdb": "8HKM", "deposition_date": "2022-11-27", "title": "Ion channel", "Keywords": ["On channel", " transport protein"], "authors": ["D.h.jiang", "J.t.zhang"], "pmid": "37494189", "doi": "10.1016/J.CELREP.2023.112858"}
+{"classification": "Signaling protein", "pdb": "6JT1", "deposition_date": "2019-04-08", "title": "Structure of human soluble guanylate cyclase in the heme oxidised State", "Keywords": ["Oluble guanylate cyclase", " signaling protein"], "authors": ["L.chen", "Y.kang", "R.liu", "J.-x.wu"], "pmid": "31514202", "doi": "10.1038/S41586-019-1584-6"}
+{"classification": "Immune system", "pdb": "7OW6", "deposition_date": "2021-06-16", "title": "Crystal structure of a tcr in complex with hla-a*11:01 bound to kras G12d peptide (vvvgadgvgk)", "Keywords": ["La", " kras", " tcr", " immune system"], "authors": ["V.karuppiah", "R.a.robinson"], "doi": "10.1038/S41467-022-32811-1"}
+{"classification": "Biosynthetic protein", "pdb": "5EQ8", "deposition_date": "2015-11-12", "title": "Crystal structure of medicago truncatula histidinol-phosphate Phosphatase (mthpp) in complex with l-histidinol", "Keywords": ["Istidine biosynthesis", " metabolic pathways", " dimer", " plant", "", "Biosynthetic protein"], "authors": ["M.ruszkowski", "Z.dauter"], "pmid": "26994138", "doi": "10.1074/JBC.M115.708727"}
+{"classification": "De novo protein", "pdb": "8CWA", "deposition_date": "2022-05-18", "title": "Solution nmr structure of 8-residue rosetta-designed cyclic peptide D8.21 in cdcl3 with cis/trans switching (tc conformation, 53%)", "Keywords": ["Yclic peptide", " non natural amino acids", " cis/trans", " switch peptides", "", "De novo design", "Membrane permeability", "De novo protein"], "authors": ["T.a.ramelot", "R.tejero", "G.t.montelione"], "pmid": "36041435", "doi": "10.1016/J.CELL.2022.07.019"}
+{"classification": "Hydrolase", "pdb": "3R6M", "deposition_date": "2011-03-21", "title": "Crystal structure of vibrio parahaemolyticus yeaz", "Keywords": ["Ctin/hsp70 nucleotide-binding fold", " bacterial resuscitation", " viable", "But non-culturable state", "Resuscitation promoting factor", "Ygjd", "", "Yjee", "Vibrio parahaemolyticus", "Hydrolase"], "authors": ["A.roujeinikova", "I.aydin"], "pmid": "21858042", "doi": "10.1371/JOURNAL.PONE.0023245"}
+{"classification": "Hydrolase", "pdb": "2W5J", "deposition_date": "2008-12-10", "title": "Structure of the c14-rotor ring of the proton translocating Chloroplast atp synthase", "Keywords": ["Ydrolase", " chloroplast", " atp synthase", " lipid-binding", " cf(0)", " membrane", "", "Transport", "Formylation", "Energy transduction", "Hydrogen ion transport", "", "Ion transport", "Transmembrane", "Membrane protein"], "authors": ["M.vollmar", "D.schlieper", "M.winn", "C.buechner", "G.groth"], "pmid": "19423706", "doi": "10.1074/JBC.M109.006916"}
+{"classification": "De novo protein", "pdb": "4GLU", "deposition_date": "2012-08-14", "title": "Crystal structure of the mirror image form of vegf-a", "Keywords": ["-protein", " covalent dimer", " cysteine knot protein", " growth factor", " de", "Novo protein"], "authors": ["K.mandal", "M.uppalapati", "D.ault-riche", "J.kenney", "J.lowitz", "S.sidhu", "", "S.b.h.kent"], "pmid": "22927390", "doi": "10.1073/PNAS.1210483109"}
+{"classification": "Hydrolase/hydrolase inhibitor", "pdb": "3WYL", "deposition_date": "2014-09-01", "title": "Crystal structure of the catalytic domain of pde10a complexed with 5- Methoxy-3-(1-phenyl-1h-pyrazol-5-yl)-1-(3-(trifluoromethyl)phenyl) Pyridazin-4(1h)-one", "Keywords": ["Ydrolase-hydrolase inhibitor complex"], "authors": ["H.oki", "Y.hayano"], "pmid": "25384088", "doi": "10.1021/JM5013648"}
+{"classification": "Isomerase", "pdb": "5BOR", "deposition_date": "2015-05-27", "title": "Structure of acetobacter aceti pure-s57c, sulfonate form", "Keywords": ["Cidophile", " pure", " purine biosynthesis", " isomerase"], "authors": ["K.l.sullivan", "T.j.kappock"]}
+{"classification": "Hydrolase", "pdb": "1X0C", "deposition_date": "2005-03-17", "title": "Improved crystal structure of isopullulanase from aspergillus niger Atcc 9642", "Keywords": ["Ullulan", " glycoside hydrolase family 49", " glycoprotein", " hydrolase"], "authors": ["M.mizuno", "T.tonozuka", "A.yamamura", "Y.miyasaka", "H.akeboshi", "S.kamitori", "", "A.nishikawa", "Y.sakano"], "pmid": "18155243", "doi": "10.1016/J.JMB.2007.11.098"}
+{"classification": "Oxidoreductase", "pdb": "7CUP", "deposition_date": "2020-08-23", "title": "Structure of 2,5-dihydroxypridine dioxygenase from pseudomonas putida Kt2440", "Keywords": ["On-heme dioxygenase", " oxidoreductase"], "authors": ["G.q.liu", "H.z.tang"]}
+{"classification": "Ligase", "pdb": "1VCN", "deposition_date": "2004-03-10", "title": "Crystal structure of t.th. hb8 ctp synthetase complex with sulfate Anion", "Keywords": ["Etramer", " riken structural genomics/proteomics initiative", " rsgi", "", "Structural genomics", "Ligase"], "authors": ["M.goto", "Riken structural genomics/proteomics initiative (rsgi)"], "pmid": "15296735", "doi": "10.1016/J.STR.2004.05.013"}
+{"classification": "Transferase/transferase inhibitor", "pdb": "6C9V", "deposition_date": "2018-01-28", "title": "Mycobacterium tuberculosis adenosine kinase bound to (2r,3s,4r,5r)-2- (hydroxymethyl)-5-(6-(4-phenylpiperazin-1-yl)-9h-purin-9-yl) Tetrahydrofuran-3,4-diol", "Keywords": ["Ucleoside analog", " complex", " inhibitor", " structural genomics", " psi-2", "", "Protein structure initiative", "Tb structural genomics consortium", "", "Tbsgc", "Transferase-transferase inhibitor complex"], "authors": ["R.a.crespo", "Tb structural genomics consortium (tbsgc)"], "pmid": "31002508", "doi": "10.1021/ACS.JMEDCHEM.9B00020"}
+{"classification": "De novo protein", "pdb": "4LPY", "deposition_date": "2013-07-16", "title": "Crystal structure of tencon variant g10", "Keywords": ["Ibronectin type iii fold", " alternate scaffold", " de novo protein"], "authors": ["A.teplyakov", "G.obmolova", "G.l.gilliland"], "pmid": "24375666", "doi": "10.1002/PROT.24502"}
+{"classification": "Isomerase", "pdb": "2Y88", "deposition_date": "2011-02-03", "title": "Crystal structure of mycobacterium tuberculosis phosphoribosyl Isomerase (variant d11n) with bound prfar", "Keywords": ["Romatic amino acid biosynthesis", " isomerase", " tim-barrel", " histidine", "Biosynthesis", "Tryptophan biosynthesis"], "authors": ["J.kuper", "A.v.due", "A.geerlof", "M.wilmanns"], "pmid": "21321225", "doi": "10.1073/PNAS.1015996108"}
+{"classification": "Unknown function", "pdb": "1SR0", "deposition_date": "2004-03-22", "title": "Crystal structure of signalling protein from sheep(sps-40) at 3.0a Resolution using crystal grown in the presence of polysaccharides", "Keywords": ["Ignalling protein", " involution", " unknown function"], "authors": ["D.b.srivastava", "A.s.ethayathulla", "N.singh", "J.kumar", "S.sharma", "T.p.singh"]}
+{"classification": "Dna binding protein", "pdb": "3RH2", "deposition_date": "2011-04-11", "title": "Crystal structure of a tetr-like transcriptional regulator (sama_0099) From shewanella amazonensis sb2b at 2.42 a resolution", "Keywords": ["Na/rna-binding 3-helical bundle", " structural genomics", " joint center", "For structural genomics", "Jcsg", "Protein structure initiative", "Psi-", "Biology", "Dna binding protein"], "authors": ["Joint center for structural genomics (jcsg)"]}
+{"classification": "Transferase", "pdb": "2WK5", "deposition_date": "2009-06-05", "title": "Structural features of native human thymidine phosphorylase And in complex with 5-iodouracil", "Keywords": ["Lycosyltransferase", " developmental protein", " angiogenesis", "", "5-iodouracil", "Growth factor", "Enzyme kinetics", "", "Differentiation", "Disease mutation", "Thymidine", "Phosphorylase", "Chemotaxis", "Transferase", "Mutagenesis", "", "Polymorphism"], "authors": ["E.mitsiki", "A.c.papageorgiou", "S.iyer", "N.thiyagarajan", "S.h.prior", "", "D.sleep", "C.finnis", "K.r.acharya"], "pmid": "19555658", "doi": "10.1016/J.BBRC.2009.06.104"}
+{"classification": "Hydrolase", "pdb": "3P9Y", "deposition_date": "2010-10-18", "title": "Crystal structure of the drosophila melanogaster ssu72-pctd complex", "Keywords": ["Hosphatase", " cis proline", " lmw ptp-like fold", " rna polymerase ii ctd", "", "Hydrolase"], "authors": ["J.w.werner-allen", "P.zhou"], "pmid": "21159777", "doi": "10.1074/JBC.M110.197129"}
+{"classification": "Recombination/dna", "pdb": "6OEO", "deposition_date": "2019-03-27", "title": "Cryo-em structure of mouse rag1/2 nfc complex (dna1)", "Keywords": ["(d)j recombination", " dna transposition", " rag", " scid", " recombination", "", "Recombination-dna complex"], "authors": ["X.chen", "Y.cui", "Z.h.zhou", "W.yang", "M.gellert"], "pmid": "32015552", "doi": "10.1038/S41594-019-0363-2"}
+{"classification": "Hydrolase", "pdb": "4ECA", "deposition_date": "1997-02-21", "title": "Asparaginase from e. coli, mutant t89v with covalently bound aspartate", "Keywords": ["Ydrolase", " acyl-enzyme intermediate", " threonine amidohydrolase"], "authors": ["G.j.palm", "J.lubkowski", "A.wlodawer"], "pmid": "8706862", "doi": "10.1016/0014-5793(96)00660-6"}
+{"classification": "Transcription/protein binding", "pdb": "3UVX", "deposition_date": "2011-11-30", "title": "Crystal structure of the first bromodomain of human brd4 in complex With a diacetylated histone 4 peptide (h4k12ack16ac)", "Keywords": ["Romodomain", " bromodomain containing protein 4", " cap", " hunk1", " mcap", "", "Mitotic chromosome associated protein", "Peptide complex", "Structural", "Genomics consortium", "Sgc", "Transcription-protein binding complex"], "authors": ["P.filippakopoulos", "S.picaud", "T.keates", "E.ugochukwu", "F.von delft", "", "C.h.arrowsmith", "A.m.edwards", "J.weigelt", "C.bountra", "S.knapp", "Structural", "Genomics consortium (sgc)"], "pmid": "22464331", "doi": "10.1016/J.CELL.2012.02.013"}
+{"classification": "Membrane protein", "pdb": "1TLZ", "deposition_date": "2004-06-10", "title": "Tsx structure complexed with uridine", "Keywords": ["Ucleoside transporter", " beta barrel", " uridine", " membrane", "Protein"], "authors": ["J.ye", "B.van den berg"], "pmid": "15272310", "doi": "10.1038/SJ.EMBOJ.7600330"}
+{"classification": "Dna binding protein", "pdb": "7AZD", "deposition_date": "2020-11-16", "title": "Dna polymerase sliding clamp from escherichia coli with peptide 20 Bound", "Keywords": ["Ntibacterial drug", " dna binding protein"], "authors": ["C.monsarrat", "G.compain", "C.andre", "I.martiel", "S.engilberge", "V.olieric", "", "P.wolff", "K.brillet", "M.landolfo", "C.silva da veiga", "J.wagner", "G.guichard", "", "D.y.burnouf"], "pmid": "34806883", "doi": "10.1021/ACS.JMEDCHEM.1C00918"}
+{"classification": "Transferase", "pdb": "5N3K", "deposition_date": "2017-02-08", "title": "Camp-dependent protein kinase a from cricetulus griseus in complex With fragment like molecule o-guanidino-l-homoserine", "Keywords": ["Ragment", " complex", " transferase", " serine threonine kinase", " camp", "", "Kinase", "Pka"], "authors": ["C.siefker", "A.heine", "G.klebe"]}
+{"classification": "Biosynthetic protein", "pdb": "8H52", "deposition_date": "2022-10-11", "title": "Crystal structure of helicobacter pylori carboxyspermidine Dehydrogenase in complex with nadp", "Keywords": ["Arboxyspermidine dehydrogenase", " biosynthetic protein"], "authors": ["K.y.ko", "S.c.park", "S.y.cho", "S.i.yoon"], "pmid": "36283333", "doi": "10.1016/J.BBRC.2022.10.049"}
+{"classification": "Metal binding protein", "pdb": "6DYC", "deposition_date": "2018-07-01", "title": "Co(ii)-bound structure of the engineered cyt cb562 variant, ch3", "Keywords": ["Esigned protein", " 4-helix bundle", " electron transport", " metal binding", "Protein"], "authors": ["F.a.tezcan", "J.rittle"], "pmid": "30778140", "doi": "10.1038/S41557-019-0218-9"}
+{"classification": "Protein fibril", "pdb": "6A6B", "deposition_date": "2018-06-27", "title": "Cryo-em structure of alpha-synuclein fiber", "Keywords": ["Lpha-syn fiber", " parkinson disease", " protein fibril"], "authors": ["Y.w.li", "C.y.zhao", "F.luo", "Z.liu", "X.gui", "Z.luo", "X.zhang", "D.li", "C.liu", "X.li"], "pmid": "30065316", "doi": "10.1038/S41422-018-0075-X"}
+{"classification": "Dna", "pdb": "7D5E", "deposition_date": "2020-09-25", "title": "Left-handed g-quadruplex containing two bulges", "Keywords": ["-quadruplex", " bulge", " dna", " left-handed"], "authors": ["P.das", "A.maity", "K.h.ngo", "F.r.winnerdy", "B.bakalar", "Y.mechulam", "E.schmitt", "", "A.t.phan"], "pmid": "33503265", "doi": "10.1093/NAR/GKAA1259"}
+{"classification": "Transferase", "pdb": "3RSY", "deposition_date": "2011-05-02", "title": "Cellobiose phosphorylase from cellulomonas uda in complex with sulfate And glycerol", "Keywords": ["H94", " alpha barrel", " cellobiose phosphorylase", " disaccharide", "Phosphorylase", "Transferase"], "authors": ["A.van hoorebeke", "J.stout", "W.soetaert", "J.van beeumen", "T.desmet", "S.savvides"]}
+{"classification": "Oxidoreductase", "pdb": "7MCI", "deposition_date": "2021-04-02", "title": "Mofe protein from azotobacter vinelandii with a sulfur-replenished Cofactor", "Keywords": ["Zotobacter vinelandii", " mofe-protein", " nitrogenase", " oxidoreductase"], "authors": ["W.kang", "C.lee", "Y.hu", "M.w.ribbe"], "doi": "10.1038/S41929-022-00782-7"}
+{"classification": "Dna", "pdb": "1XUW", "deposition_date": "2004-10-26", "title": "Structural rationalization of a large difference in rna affinity Despite a small difference in chemistry between two 2'-o-modified Nucleic acid analogs", "Keywords": ["Na mimetic methylcarbamate amide analog", " dna"], "authors": ["R.pattanayek", "L.sethaphong", "C.pan", "M.prhavc", "T.p.prakash", "M.manoharan", "", "M.egli"], "pmid": "15547979", "doi": "10.1021/JA044637K"}
+{"classification": "Lyase", "pdb": "7C0D", "deposition_date": "2020-05-01", "title": "Crystal structure of azospirillum brasilense l-2-keto-3-deoxyarabonate Dehydratase (hydroxypyruvate-bound form)", "Keywords": ["-2-keto-3-deoxyarabonate dehydratase", " lyase"], "authors": ["Y.watanabe", "S.watanabe"], "pmid": "32697085", "doi": "10.1021/ACS.BIOCHEM.0C00515"}
+{"classification": "Signaling protein", "pdb": "5LYK", "deposition_date": "2016-09-28", "title": "Crystal structure of intracellular b30.2 domain of btn3a1 bound to Citrate", "Keywords": ["30.2", " butyrophilin", " signaling protein"], "authors": ["F.mohammed", "A.t.baker", "M.salim", "B.e.willcox"], "pmid": "28862425", "doi": "10.1021/ACSCHEMBIO.7B00694"}
+{"classification": "Toxin", "pdb": "4IZL", "deposition_date": "2013-01-30", "title": "Structure of the n248a mutant of the panton-valentine leucocidin s Component from staphylococcus aureus", "Keywords": ["I-component leucotoxin", " staphylococcus aureus", " s component", "Leucocidin", "Beta-barrel pore forming toxin", "Toxin"], "authors": ["L.maveyraud", "B.j.laventie", "G.prevost", "L.mourey"], "pmid": "24643034", "doi": "10.1371/JOURNAL.PONE.0092094"}
+{"classification": "Dna", "pdb": "6F3C", "deposition_date": "2017-11-28", "title": "The cytotoxic [pt(h2bapbpy)] platinum complex interacting with the Cgtacg hexamer", "Keywords": ["Rug-dna complex", " four-way junction", " dna"], "authors": ["M.ferraroni", "C.bazzicalupi", "P.gratteri", "F.papi"], "pmid": "31046177", "doi": "10.1002/ANIE.201814532"}
+{"classification": "Signaling protein/inhibitor", "pdb": "4L5M", "deposition_date": "2013-06-11", "title": "Complexe of arno sec7 domain with the protein-protein interaction Inhibitor n-(4-hydroxy-2,6-dimethylphenyl)benzenesulfonamide at ph6.5", "Keywords": ["Ec-7domain", " signaling protein-inhibitor complex"], "authors": ["F.hoh", "J.rouhana"], "pmid": "24112024", "doi": "10.1021/JM4009357"}
+{"classification": "Signaling protein", "pdb": "5I6J", "deposition_date": "2016-02-16", "title": "Crystal structure of srgap2 f-barx", "Keywords": ["Rgap2", " f-bar", " fx", " signaling protein"], "authors": ["M.sporny", "J.guez-haddad", "M.n.isupov", "Y.opatowsky"], "pmid": "28333212", "doi": "10.1093/MOLBEV/MSX094"}
+{"classification": "Metal binding protein", "pdb": "1Q80", "deposition_date": "2003-08-20", "title": "Solution structure and dynamics of nereis sarcoplasmic calcium binding Protein", "Keywords": ["Ll-alpha", " metal binding protein"], "authors": ["G.rabah", "R.popescu", "J.a.cox", "Y.engelborghs", "C.t.craescu"], "pmid": "15819893", "doi": "10.1111/J.1742-4658.2005.04629.X"}
+{"classification": "Transferase", "pdb": "1TW1", "deposition_date": "2004-06-30", "title": "Beta-1,4-galactosyltransferase mutant met344his (m344h-gal-t1) complex With udp-galactose and magnesium", "Keywords": ["Et344his mutation; closed conformation; mn binding", " transferase"], "authors": ["B.ramakrishnan", "E.boeggeman", "P.k.qasba"], "pmid": "15449940", "doi": "10.1021/BI049007+"}
+{"classification": "Rna", "pdb": "2PN4", "deposition_date": "2007-04-23", "title": "Crystal structure of hepatitis c virus ires subdomain iia", "Keywords": ["Cv", " ires", " subdoamin iia", " rna", " strontium", " hepatitis"], "authors": ["Q.zhao", "Q.han", "C.r.kissinger", "P.a.thompson"], "pmid": "18391410", "doi": "10.1107/S0907444908002011"}

@ -1,6 +1,36 @@
|
||||||
{"pid": "Q6GZX4", "dates": [{"date": "28-JUN-2011", "date_info": " integrated into UniProtKB/Swiss-Prot."}, {"date": "19-JUL-2004", "date_info": " sequence version 1."}, {"date": "12-AUG-2020", "date_info": " entry version 41."}], "title": "Putative transcription factor 001R;", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3).", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus."], "references": [{"PubMed": "15165820"}, {" DOI": "10.1016/j.virol.2004.02.019"}]}
|
{"pid": " Q6GZX4", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 43"}], "title": "Putative transcription factor 001R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
|
||||||
{"pid": "Q6GZX3", "dates": [{"date": "28-JUN-2011", "date_info": " integrated into UniProtKB/Swiss-Prot."}, {"date": "19-JUL-2004", "date_info": " sequence version 1."}, {"date": "12-AUG-2020", "date_info": " entry version 42."}], "title": "Uncharacterized protein 002L;", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3).", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus."], "references": [{"PubMed": "15165820"}, {" DOI": "10.1016/j.virol.2004.02.019"}]}
|
{"pid": " Q6GZX3", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 45"}], "title": "Uncharacterized protein 002L", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
|
||||||
{"pid": "Q197F8", "dates": [{"date": "16-JUN-2009", "date_info": " integrated into UniProtKB/Swiss-Prot."}, {"date": "11-JUL-2006", "date_info": " sequence version 1."}, {"date": "12-AUG-2020", "date_info": " entry version 27."}], "title": "Uncharacterized protein 002R;", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus).", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus."], "references": [{"PubMed": "16912294"}, {" DOI": "10.1128/jvi.00464-06"}]}
|
{"pid": " Q197F8", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2022-02-23", "date_info": "entry version 29"}], "title": "Uncharacterized protein 002R", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
|
||||||
{"pid": "Q197F7", "dates": [{"date": "16-JUN-2009", "date_info": " integrated into UniProtKB/Swiss-Prot."}, {"date": "11-JUL-2006", "date_info": " sequence version 1."}, {"date": "12-AUG-2020", "date_info": " entry version 23."}], "title": "Uncharacterized protein 003L;", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus).", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus."], "references": [{"PubMed": "16912294"}, {" DOI": "10.1128/jvi.00464-06"}]}
|
{"pid": " Q197F7", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2020-08-12", "date_info": "entry version 23"}], "title": "Uncharacterized protein 003L", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
|
||||||
{"pid": "Q6GZX2", "dates": [{"date": "28-JUN-2011", "date_info": " integrated into UniProtKB/Swiss-Prot."}, {"date": "19-JUL-2004", "date_info": " sequence version 1."}, {"date": "12-AUG-2020", "date_info": " entry version 36."}], "title": "Uncharacterized protein 3R;", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3).", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus."], "references": [{"PubMed": "15165820"}, {" DOI": "10.1016/j.virol.2004.02.019"}]}
|
{"pid": " Q6GZX2", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 37"}], "title": "Uncharacterized protein 3R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
|
||||||
{"pid": "Q6GZX1", "dates": [{"date": "28-JUN-2011", "date_info": " integrated into UniProtKB/Swiss-Prot."}, {"date": "19-JUL-2004", "date_info": " sequence version 1."}, {"date": "12-AUG-2020", "date_info": " entry version 34."}], "title": "Uncharacterized protein 004R;", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3).", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus."], "references": [{"PubMed": "15165820"}, {" DOI": "10.1016/j.virol.2004.02.019"}]}
|
{"pid": " Q6GZX1", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 38"}], "title": "Uncharacterized protein 004R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
|
||||||
|
{"pid": " Q197F5", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2022-10-12", "date_info": "entry version 32"}], "title": "Uncharacterized protein 005L", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
|
||||||
|
{"pid": " Q6GZX0", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 47"}], "title": "Uncharacterized protein 005R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
|
||||||
|
{"pid": " Q91G88", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2001-12-01", "date_info": "sequence version 1"}, {"date": "2023-06-28", "date_info": "entry version 53"}], "title": "Putative KilA-N domain-containing protein 006L", "organism_species": "Invertebrate iridescent virus 6 (IIV-6) (Chilo iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Iridovirus"], "references": [{"PubMed": "17239238"}, {"DOI": "10.1186/1743-422x-4-11"}]}
|
||||||
|
{"pid": " Q6GZW9", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 34"}], "title": "Uncharacterized protein 006R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
|
||||||
|
{"pid": " Q6GZW8", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 32"}], "title": "Uncharacterized protein 007R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
|
||||||
|
{"pid": " Q197F3", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2023-02-22", "date_info": "entry version 28"}], "title": "Uncharacterized protein 007R", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
|
||||||
|
{"pid": " Q197F2", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2022-02-23", "date_info": "entry version 22"}], "title": "Uncharacterized protein 008L", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
|
||||||
|
{"pid": " Q6GZW6", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 67"}], "title": "Putative helicase 009L", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
|
||||||
|
{"pid": " Q91G85", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2001-12-01", "date_info": "sequence version 1"}, {"date": "2023-02-22", "date_info": "entry version 38"}], "title": "Uncharacterized protein 009R", "organism_species": "Invertebrate iridescent virus 6 (IIV-6) (Chilo iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Iridovirus"], "references": [{"PubMed": "17239238"}, {"DOI": "10.1186/1743-422x-4-11"}]}
|
||||||
|
{"pid": " Q6GZW5", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 37"}], "title": "Uncharacterized protein 010R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
|
||||||
|
{"pid": " Q197E9", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2023-02-22", "date_info": "entry version 28"}], "title": "Uncharacterized protein 011L", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
|
||||||
|
{"pid": " Q6GZW4", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 37"}], "title": "Uncharacterized protein 011R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
|
||||||
|
{"pid": " Q6GZW3", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 35"}], "title": "Uncharacterized protein 012L", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
|
||||||
|
{"pid": " Q197E7", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2023-02-22", "date_info": "entry version 37"}], "title": "Uncharacterized protein IIV3-013L", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
|
||||||
|
{"pid": " Q6GZW2", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 30"}], "title": "Uncharacterized protein 013R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZW1", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 35"}], "title": "Uncharacterized protein 014R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZW0", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 50"}], "title": "Uncharacterized protein 015R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZV8", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 35"}], "title": "Uncharacterized protein 017L", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZV7", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 33"}], "title": "Uncharacterized protein 018L", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZV6", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 87"}], "title": "Putative serine/threonine-protein kinase 019R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZV5", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 40"}], "title": "Uncharacterized protein 020R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZV4", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 35"}], "title": "Uncharacterized protein 021L", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q197D8", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2022-12-14", "date_info": "entry version 35"}], "title": "Transmembrane protein 022L", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
{"pid": " Q6GZV2", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 33"}], "title": "Uncharacterized protein 023R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q197D7", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2023-02-22", "date_info": "entry version 25"}], "title": "Uncharacterized protein 023R", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
{"pid": " Q6GZV1", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 37"}], "title": "Uncharacterized protein 024R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q197D5", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2022-10-12", "date_info": "entry version 24"}], "title": "Uncharacterized protein 025R", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
{"pid": " Q91G70", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2001-12-01", "date_info": "sequence version 1"}, {"date": "2020-08-12", "date_info": "entry version 32"}], "title": "Uncharacterized protein 026R", "organism_species": "Invertebrate iridescent virus 6 (IIV-6) (Chilo iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Iridovirus"], "references": [{"PubMed": "17239238"}, {"DOI": "10.1186/1743-422x-4-11"}]}
{"pid": " Q6GZU9", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 49"}], "title": "Uncharacterized protein 027R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZU8", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 55"}], "title": "Uncharacterized protein 028R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
@ -3,6 +3,7 @@ package eu.dnetlib.dhp.collection.mag
import com.fasterxml.jackson.databind.ObjectMapper
import eu.dnetlib.dhp.schema.oaf.{Dataset, Publication, Result}
import org.apache.spark.sql.SparkSession
+ import org.apache.spark.sql.functions.col
import org.junit.jupiter.api.Assertions._
import org.junit.jupiter.api.Test

@ -18,10 +19,8 @@ class MAGMappingTest {
.master("local[*]")
.getOrCreate()

- val s = new SparkMagOrganizationAS(null, null, null)
+ val s = new SparkMAGtoOAF(null, null, null)

- s.generateAS(spark, "/home/sandro/Downloads/mag_test", "/home/sandro/Downloads/mag_AS")
+ s.convertMAG(spark, "/Users/sandro/Downloads/", "/Users/sandro/Downloads/mag_OAF")

}

@Test
@ -26,15 +26,15 @@ import eu.dnetlib.dhp.schema.oaf.Publication;
import eu.dnetlib.dhp.schema.oaf.Result;
import eu.dnetlib.dhp.schema.oaf.Software;

- public class PrepareSimpleEntititiesJob {
+ public class PrepareSimpleEntitiesJob {

- private static final Logger log = LoggerFactory.getLogger(PrepareSimpleEntititiesJob.class);
+ private static final Logger log = LoggerFactory.getLogger(PrepareSimpleEntitiesJob.class);

public static void main(final String[] args) throws Exception {
final ArgumentApplicationParser parser = new ArgumentApplicationParser(
IOUtils
.toString(
- PrepareSimpleEntititiesJob.class
+ PrepareSimpleEntitiesJob.class
.getResourceAsStream("/eu/dnetlib/dhp/broker/oa/common_params.json")));
parser.parseArgument(args);
@ -160,8 +160,7 @@ public class ConversionUtils {
.stream()
.filter(Objects::nonNull)
.filter(pid -> pid.getQualifier() != null)
- .filter(pid -> pid.getQualifier().getClassid() != null)
- .filter(pid -> pid.getQualifier().getClassid().equalsIgnoreCase(ModelConstants.ORCID))
+ .filter(pid -> StringUtils.startsWithIgnoreCase(pid.getQualifier().getClassid(), ModelConstants.ORCID))
.map(StructuredProperty::getValue)
.map(ConversionUtils::cleanOrcid)
.filter(StringUtils::isNotBlank)
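Note: the filter above is loosened from an exact classid match to a case-insensitive prefix match on ModelConstants.ORCID, and the separate null check on the classid can be dropped because StringUtils.startsWithIgnoreCase is null-safe. A minimal stand-alone sketch of the difference; the classid values below, including "ORCID_pending", are illustrative and not taken from this diff:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;

public class OrcidClassidFilterSketch {

	public static void main(String[] args) {
		final String orcid = "orcid"; // stand-in for ModelConstants.ORCID
		final List<String> classids = Arrays.asList("orcid", "ORCID_pending", "doi", null);

		// exact match keeps only the plain "orcid" classid
		final List<String> exact = classids
			.stream()
			.filter(c -> c != null && c.equalsIgnoreCase(orcid))
			.collect(Collectors.toList());

		// prefix match also keeps "orcid_pending"-style classids and tolerates nulls
		final List<String> prefixed = classids
			.stream()
			.filter(c -> StringUtils.startsWithIgnoreCase(c, orcid))
			.collect(Collectors.toList());

		System.out.println(exact); // [orcid]
		System.out.println(prefixed); // [orcid, ORCID_pending]
	}
}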
@ -7,7 +7,7 @@
</property>
<property>
<name>outputDir</name>
- <description>the path where the the generated data will be stored</description>
+ <description>the path where the generated data will be stored</description>
</property>
<property>
<name>datasourceIdWhitelist</name>
@ -179,17 +179,18 @@
<master>yarn</master>
<mode>cluster</mode>
<name>PrepareSimpleEntititiesJob</name>
- <class>eu.dnetlib.dhp.broker.oa.PrepareSimpleEntititiesJob</class>
+ <class>eu.dnetlib.dhp.broker.oa.PrepareSimpleEntitiesJob</class>
<jar>dhp-broker-events-${projectVersion}.jar</jar>
<spark-opts>
--executor-cores=${sparkExecutorCores}
--executor-memory=${sparkExecutorMemory}
--driver-memory=${sparkDriverMemory}
+ --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
--conf spark.extraListeners=${spark2ExtraListeners}
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
- --conf spark.sql.shuffle.partitions=3840
+ --conf spark.sql.shuffle.partitions=5000
</spark-opts>
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
<arg>--workingDir</arg><arg>${workingDir}</arg>
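Note: this and the following near-identical spark-opts hunks repeat the same two changes per action: an explicit spark.executor.memoryOverhead (set to the same value as the executor memory) and a per-step spark.sql.shuffle.partitions value in place of the fixed 3840. A small sketch of what the two settings control, with illustrative values rather than the workflow's parameters:

import org.apache.spark.sql.SparkSession;

public class ShuffleTuningSketch {

	public static void main(String[] args) {
		final SparkSession spark = SparkSession
			.builder()
			.appName("shuffle-tuning-sketch")
			.master("local[*]")
			// extra off-heap allowance per executor, on top of the JVM heap (relevant in cluster mode)
			.config("spark.executor.memoryOverhead", "2g")
			// number of partitions used by shuffles in joins and aggregations
			.config("spark.sql.shuffle.partitions", "5000")
			.getOrCreate();

		System.out.println(spark.conf().get("spark.sql.shuffle.partitions")); // 5000
		spark.stop();
	}
}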
@ -209,11 +210,12 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
--conf spark.sql.shuffle.partitions=3840
|
--conf spark.sql.shuffle.partitions=8000
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
||||||
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
||||||
|
@ -234,11 +236,12 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
--conf spark.sql.shuffle.partitions=3840
|
--conf spark.sql.shuffle.partitions=8000
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
||||||
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
||||||
|
@ -258,11 +261,12 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
--conf spark.sql.shuffle.partitions=3840
|
--conf spark.sql.shuffle.partitions=5000
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
||||||
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
||||||
|
@ -282,11 +286,12 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
--conf spark.sql.shuffle.partitions=3840
|
--conf spark.sql.shuffle.partitions=10000
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
||||||
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
||||||
|
@ -306,11 +311,12 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
--conf spark.sql.shuffle.partitions=3840
|
--conf spark.sql.shuffle.partitions=2000
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
||||||
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
||||||
|
@ -332,11 +338,12 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
--conf spark.sql.shuffle.partitions=3840
|
--conf spark.sql.shuffle.partitions=8000
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
||||||
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
||||||
|
@ -356,11 +363,12 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
--conf spark.sql.shuffle.partitions=3840
|
--conf spark.sql.shuffle.partitions=8000
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
||||||
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
||||||
|
@ -380,11 +388,12 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
--conf spark.sql.shuffle.partitions=3840
|
--conf spark.sql.shuffle.partitions=8000
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
||||||
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
||||||
|
@ -404,11 +413,12 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
--conf spark.sql.shuffle.partitions=3840
|
--conf spark.sql.shuffle.partitions=8000
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
||||||
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
||||||
|
@ -428,11 +438,12 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
--conf spark.sql.shuffle.partitions=3840
|
--conf spark.sql.shuffle.partitions=8000
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
||||||
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
||||||
|
@ -452,11 +463,12 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
--conf spark.sql.shuffle.partitions=3840
|
--conf spark.sql.shuffle.partitions=8000
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
<arg>--graphPath</arg><arg>${graphInputPath}</arg>
|
||||||
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
||||||
|
@ -476,11 +488,12 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
|
||||||
--conf spark.sql.shuffle.partitions=3840
|
--conf spark.sql.shuffle.partitions=8000
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
<arg>--workingDir</arg><arg>${workingDir}</arg>
|
||||||
<arg>--outputDir</arg><arg>${outputDir}</arg>
|
<arg>--outputDir</arg><arg>${outputDir}</arg>
|
||||||
|
@ -503,6 +516,7 @@
|
||||||
<spark-opts>
|
<spark-opts>
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.dynamicAllocation.maxExecutors=${sparkMaxExecutorsForIndexing}
|
--conf spark.dynamicAllocation.maxExecutors=${sparkMaxExecutorsForIndexing}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
|
@ -535,6 +549,7 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
|
@ -562,6 +577,7 @@
|
||||||
--executor-cores=${sparkExecutorCores}
|
--executor-cores=${sparkExecutorCores}
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
|
||||||
|
@ -585,6 +601,7 @@
|
||||||
<spark-opts>
|
<spark-opts>
|
||||||
--executor-memory=${sparkExecutorMemory}
|
--executor-memory=${sparkExecutorMemory}
|
||||||
--driver-memory=${sparkDriverMemory}
|
--driver-memory=${sparkDriverMemory}
|
||||||
|
--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
|
||||||
--conf spark.dynamicAllocation.maxExecutors=${sparkMaxExecutorsForIndexing}
|
--conf spark.dynamicAllocation.maxExecutors=${sparkMaxExecutorsForIndexing}
|
||||||
--conf spark.extraListeners=${spark2ExtraListeners}
|
--conf spark.extraListeners=${spark2ExtraListeners}
|
||||||
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
|
||||||
|
|
|
@ -0,0 +1,64 @@
package eu.dnetlib.dhp.broker.oa.matchers.simple;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.util.List;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import eu.dnetlib.broker.objects.OaBrokerAuthor;
import eu.dnetlib.broker.objects.OaBrokerMainEntity;

class EnrichMissingAuthorOrcidTest {

	final EnrichMissingAuthorOrcid matcher = new EnrichMissingAuthorOrcid();

	@BeforeEach
	void setUp() throws Exception {}

	@Test
	void testFindDifferences_1() {
		final OaBrokerMainEntity source = new OaBrokerMainEntity();
		final OaBrokerMainEntity target = new OaBrokerMainEntity();
		final List<OaBrokerAuthor> list = this.matcher.findDifferences(source, target);
		assertTrue(list.isEmpty());
	}

	@Test
	void testFindDifferences_2() {
		final OaBrokerMainEntity source = new OaBrokerMainEntity();
		final OaBrokerMainEntity target = new OaBrokerMainEntity();

		source.getCreators().add(new OaBrokerAuthor("Claudio Atzori", "0000-0001-9613-6639"));
		target.getCreators().add(new OaBrokerAuthor("Claudio Atzori", null));

		final List<OaBrokerAuthor> list = this.matcher.findDifferences(source, target);
		assertEquals(1, list.size());
	}

	@Test
	void testFindDifferences_3() {
		final OaBrokerMainEntity source = new OaBrokerMainEntity();
		final OaBrokerMainEntity target = new OaBrokerMainEntity();

		source.getCreators().add(new OaBrokerAuthor("Claudio Atzori", null));
		target.getCreators().add(new OaBrokerAuthor("Claudio Atzori", "0000-0001-9613-6639"));

		final List<OaBrokerAuthor> list = this.matcher.findDifferences(source, target);
		assertTrue(list.isEmpty());
	}

	@Test
	void testFindDifferences_4() {
		final OaBrokerMainEntity source = new OaBrokerMainEntity();
		final OaBrokerMainEntity target = new OaBrokerMainEntity();
		source.getCreators().add(new OaBrokerAuthor("Claudio Atzori", "0000-0001-9613-6639"));
		target.getCreators().add(new OaBrokerAuthor("Claudio Atzori", "0000-0001-9613-6639"));

		final List<OaBrokerAuthor> list = this.matcher.findDifferences(source, target);
		assertTrue(list.isEmpty());
	}

}
@ -2,27 +2,31 @@
|
||||||
package eu.dnetlib.dhp.broker.oa.util;
|
package eu.dnetlib.dhp.broker.oa.util;
|
||||||
|
|
||||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||||
|
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
|
import java.util.Arrays;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
import org.junit.jupiter.api.BeforeEach;
|
import org.junit.jupiter.api.BeforeEach;
|
||||||
import org.junit.jupiter.api.Test;
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
|
import eu.dnetlib.broker.objects.OaBrokerMainEntity;
|
||||||
import eu.dnetlib.broker.objects.OaBrokerTypedValue;
|
import eu.dnetlib.broker.objects.OaBrokerTypedValue;
|
||||||
|
import eu.dnetlib.dhp.schema.common.ModelConstants;
|
||||||
|
import eu.dnetlib.dhp.schema.oaf.Author;
|
||||||
import eu.dnetlib.dhp.schema.oaf.Instance;
|
import eu.dnetlib.dhp.schema.oaf.Instance;
|
||||||
import eu.dnetlib.dhp.schema.oaf.Qualifier;
|
import eu.dnetlib.dhp.schema.oaf.Qualifier;
|
||||||
import eu.dnetlib.dhp.schema.oaf.Result;
|
import eu.dnetlib.dhp.schema.oaf.Result;
|
||||||
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
|
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
|
||||||
|
|
||||||
class ConversionUtilsTest {
|
public class ConversionUtilsTest {
|
||||||
|
|
||||||
@BeforeEach
|
@BeforeEach
|
||||||
void setUp() throws Exception {
|
public void setUp() throws Exception {}
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
void testAllResultPids() {
|
public void testAllResultPids() {
|
||||||
final Qualifier qf = new Qualifier();
|
final Qualifier qf = new Qualifier();
|
||||||
qf.setClassid("test");
|
qf.setClassid("test");
|
||||||
qf.setClassname("test");
|
qf.setClassname("test");
|
||||||
|
@ -91,4 +95,42 @@ class ConversionUtilsTest {
|
||||||
assertEquals(6, list.size());
|
assertEquals(6, list.size());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void testOafResultToBrokerResult() {
|
||||||
|
|
||||||
|
final Author a1 = createAuthor("Michele Artini", "0000-0002-4406-428X");
|
||||||
|
final Author a2 = createAuthor("Claudio Atzori", "http://orcid.org/0000-0001-9613-6639");
|
||||||
|
final Author a3 = createAuthor("Alessia Bardi", null);
|
||||||
|
|
||||||
|
final Result r = new Result();
|
||||||
|
r.setAuthor(Arrays.asList(a1, a2, a3));
|
||||||
|
|
||||||
|
final OaBrokerMainEntity br = ConversionUtils.oafResultToBrokerResult(r);
|
||||||
|
|
||||||
|
assertEquals(3, br.getCreators().size());
|
||||||
|
assertEquals("0000-0002-4406-428X", br.getCreators().get(0).getOrcid());
|
||||||
|
assertEquals("0000-0001-9613-6639", br.getCreators().get(1).getOrcid());
|
||||||
|
assertNull(br.getCreators().get(2).getOrcid());
|
||||||
|
}
|
||||||
|
|
||||||
|
private Author createAuthor(final String name, final String orcid) {
|
||||||
|
|
||||||
|
final Author a = new Author();
|
||||||
|
a.setFullname("Michele Artini");
|
||||||
|
|
||||||
|
if (orcid != null) {
|
||||||
|
final Qualifier q = new Qualifier();
|
||||||
|
q.setClassid(ModelConstants.ORCID);
|
||||||
|
q.setClassname(ModelConstants.ORCID);
|
||||||
|
q.setSchemeid("dnet:pids");
|
||||||
|
q.setSchemename("dnet:pids");
|
||||||
|
|
||||||
|
final StructuredProperty pid = new StructuredProperty();
|
||||||
|
pid.setQualifier(q);
|
||||||
|
pid.setValue(orcid);
|
||||||
|
|
||||||
|
a.setPid(Arrays.asList(pid));
|
||||||
|
}
|
||||||
|
return a;
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -203,8 +203,8 @@ public class SparkCreateMergeRels extends AbstractSparkAction {
WindowSpec w = Window
.partitionBy("groupId")
.orderBy(
- col("lastUsage").desc_nulls_last(),
col("pidType").asc_nulls_last(),
+ col("lastUsage").desc_nulls_last(),
col("collectedfrom").desc_nulls_last(),
col("date").asc_nulls_last(),
col("id").asc_nulls_last());
@ -22,7 +22,9 @@ import eu.dnetlib.dhp.oa.dedup.model.OrgSimRel;
import eu.dnetlib.dhp.schema.common.EntityType;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.common.ModelSupport;
+ import eu.dnetlib.dhp.schema.oaf.Field;
import eu.dnetlib.dhp.schema.oaf.Organization;
+ import eu.dnetlib.dhp.schema.oaf.Qualifier;
import eu.dnetlib.dhp.schema.oaf.Relation;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;

@ -164,12 +166,12 @@ public class SparkPrepareNewOrgs extends AbstractSparkAction {
.map(
(MapFunction<Tuple2<Tuple2<String, Organization>, Tuple2<String, String>>, OrgSimRel>) r -> new OrgSimRel(
"",
- r._1()._2().getOriginalId().get(0),
+ Optional.ofNullable(r._1()._2().getOriginalId()).map(oid -> oid.get(0)).orElse(null),
- r._1()._2().getLegalname() != null ? r._1()._2().getLegalname().getValue() : "",
+ Optional.ofNullable(r._1()._2().getLegalname()).map(Field::getValue).orElse(""),
- r._1()._2().getLegalshortname() != null ? r._1()._2().getLegalshortname().getValue() : "",
+ Optional.ofNullable(r._1()._2().getLegalshortname()).map(Field::getValue).orElse(""),
- r._1()._2().getCountry() != null ? r._1()._2().getCountry().getClassid() : "",
+ Optional.ofNullable(r._1()._2().getCountry()).map(Qualifier::getClassid).orElse(""),
- r._1()._2().getWebsiteurl() != null ? r._1()._2().getWebsiteurl().getValue() : "",
+ Optional.ofNullable(r._1()._2().getWebsiteurl()).map(Field::getValue).orElse(""),
- r._1()._2().getCollectedfrom().get(0).getValue(),
+ Optional.ofNullable(r._1()._2().getCollectedfrom()).map(cf -> cf.get(0).getValue()).orElse(null),
"",
structuredPropertyListToString(r._1()._2().getPid()),
parseECField(r._1()._2().getEclegalbody()),
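Note: the OrgSimRel mapping above replaces per-field ternary null checks with Optional.ofNullable(...).map(...).orElse(...) chains, so a missing originalId, legalname, country or collectedfrom no longer risks a NullPointerException. A self-contained sketch of the pattern; the Field and Org classes below are simplified stand-ins, not the project's schema beans:

import java.util.Optional;

public class OptionalFieldSketch {

	static class Field<T> {
		private final T value;
		Field(T value) { this.value = value; }
		T getValue() { return value; }
	}

	static class Org {
		Field<String> legalname; // may be null
	}

	public static void main(String[] args) {
		Org o = new Org();

		// ternary style: NPE-safe only for the checks you remember to write
		String v1 = o.legalname != null ? o.legalname.getValue() : "";

		// Optional style: the null check and the default live in one expression
		String v2 = Optional.ofNullable(o.legalname).map(Field::getValue).orElse("");

		System.out.println(v1.equals(v2)); // true
	}
}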
@ -217,7 +217,7 @@ public class SparkPrepareOrgRels extends AbstractSparkAction {
final Organization o = r._2()._2();
return new OrgSimRel(
r._1()._1(),
- o.getOriginalId().get(0),
+ Optional.ofNullable(o.getOriginalId()).map(oid -> oid.get(0)).orElse(null),
Optional.ofNullable(o.getLegalname()).map(Field::getValue).orElse(""),
Optional.ofNullable(o.getLegalshortname()).map(Field::getValue).orElse(""),
Optional.ofNullable(o.getCountry()).map(Qualifier::getClassid).orElse(""),

@ -249,7 +249,9 @@ public class SparkPrepareOrgRels extends AbstractSparkAction {
.map(
(MapFunction<Tuple2<Tuple2<String, OrgSimRel>, Tuple2<String, Organization>>, OrgSimRel>) r -> {
OrgSimRel orgSimRel = r._1()._2();
- orgSimRel.setLocal_id(r._2()._2().getOriginalId().get(0));
+ orgSimRel
+ 	.setLocal_id(
+ 		Optional.ofNullable(r._2()._2().getOriginalId()).map(oid -> oid.get(0)).orElse(null));
return orgSimRel;
},
Encoders.bean(OrgSimRel.class));
@ -15,4 +15,12 @@
<name>oozie.action.sharelib.for.spark</name>
<value>spark2</value>
</property>
+ <property>
+ 	<name>hiveMetastoreUris</name>
+ 	<value>thrift://iis-cdh5-test-m3.ocean.icm.edu.pl:9083</value>
+ </property>
+ <property>
+ 	<name>pivotHistoryDatabase</name>
+ 	<value>​</value>
+ </property>
</configuration>
@ -198,6 +198,8 @@
<arg>--isLookUpUrl</arg><arg>${isLookUpUrl}</arg>
<arg>--actionSetId</arg><arg>${actionSetId}</arg>
<arg>--cutConnectedComponent</arg><arg>${cutConnectedComponent}</arg>
+ <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
+ <arg>--pivotHistoryDatabase</arg><arg>${pivotHistoryDatabase}</arg>
</spark>
<ok to="PrepareOrgRels"/>
<error to="Kill"/>
@ -0,0 +1,103 @@
|
||||||
|
|
||||||
|
package eu.dnetlib.dhp.oa.dedup;
|
||||||
|
|
||||||
|
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||||
|
|
||||||
|
import java.io.BufferedReader;
|
||||||
|
import java.io.FileReader;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.io.Serializable;
|
||||||
|
import java.lang.reflect.InvocationTargetException;
|
||||||
|
import java.nio.file.Paths;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
import org.codehaus.jackson.map.ObjectMapper;
|
||||||
|
import org.junit.jupiter.api.BeforeEach;
|
||||||
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
|
import eu.dnetlib.dhp.schema.oaf.DataInfo;
|
||||||
|
import eu.dnetlib.dhp.schema.oaf.Dataset;
|
||||||
|
import eu.dnetlib.dhp.schema.oaf.utils.MergeUtils;
|
||||||
|
import eu.dnetlib.pace.util.MapDocumentUtil;
|
||||||
|
import scala.Tuple2;
|
||||||
|
|
||||||
|
class DatasetMergerTest implements Serializable {
|
||||||
|
|
||||||
|
private List<Tuple2<String, Dataset>> datasets;
|
||||||
|
|
||||||
|
private String testEntityBasePath;
|
||||||
|
private DataInfo dataInfo;
|
||||||
|
private final String dedupId = "50|doi_________::3d18564ef27ebe9ef3bd8b4dec67e148";
|
||||||
|
private Dataset dataset_top;
|
||||||
|
|
||||||
|
@BeforeEach
|
||||||
|
public void setUp() throws Exception {
|
||||||
|
testEntityBasePath = Paths
|
||||||
|
.get(SparkDedupTest.class.getResource("/eu/dnetlib/dhp/dedup/json").toURI())
|
||||||
|
.toFile()
|
||||||
|
.getAbsolutePath();
|
||||||
|
|
||||||
|
datasets = readSample(testEntityBasePath + "/dataset_merge.json", Dataset.class);
|
||||||
|
|
||||||
|
dataset_top = getTopPub(datasets);
|
||||||
|
|
||||||
|
dataInfo = setDI();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
void datasetMergerTest() throws InstantiationException, IllegalAccessException, InvocationTargetException {
|
||||||
|
Dataset pub_merged = MergeUtils.mergeGroup(dedupId, datasets.stream().map(Tuple2::_2).iterator());
|
||||||
|
|
||||||
|
// verify id
|
||||||
|
assertEquals(dedupId, pub_merged.getId());
|
||||||
|
assertEquals(2, pub_merged.getInstance().size());
|
||||||
|
}
|
||||||
|
|
||||||
|
public DataInfo setDI() {
|
||||||
|
DataInfo dataInfo = new DataInfo();
|
||||||
|
dataInfo.setTrust("0.9");
|
||||||
|
dataInfo.setDeletedbyinference(false);
|
||||||
|
dataInfo.setInferenceprovenance("testing");
|
||||||
|
dataInfo.setInferred(true);
|
||||||
|
return dataInfo;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Dataset getTopPub(List<Tuple2<String, Dataset>> publications) {
|
||||||
|
|
||||||
|
Double maxTrust = 0.0;
|
||||||
|
Dataset maxPub = new Dataset();
|
||||||
|
for (Tuple2<String, Dataset> publication : publications) {
|
||||||
|
Double pubTrust = Double.parseDouble(publication._2().getDataInfo().getTrust());
|
||||||
|
if (pubTrust > maxTrust) {
|
||||||
|
maxTrust = pubTrust;
|
||||||
|
maxPub = publication._2();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return maxPub;
|
||||||
|
}
|
||||||
|
|
||||||
|
public <T> List<Tuple2<String, T>> readSample(String path, Class<T> clazz) {
|
||||||
|
List<Tuple2<String, T>> res = new ArrayList<>();
|
||||||
|
BufferedReader reader;
|
||||||
|
try {
|
||||||
|
reader = new BufferedReader(new FileReader(path));
|
||||||
|
String line = reader.readLine();
|
||||||
|
while (line != null) {
|
||||||
|
res
|
||||||
|
.add(
|
||||||
|
new Tuple2<>(
|
||||||
|
MapDocumentUtil.getJPathString("$.id", line),
|
||||||
|
new ObjectMapper().readValue(line, clazz)));
|
||||||
|
// read next line
|
||||||
|
line = reader.readLine();
|
||||||
|
}
|
||||||
|
reader.close();
|
||||||
|
} catch (IOException e) {
|
||||||
|
e.printStackTrace();
|
||||||
|
}
|
||||||
|
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -93,14 +93,14 @@ class EntityMergerTest implements Serializable {
assertEquals(pub_top.getJournal().getConferencedate(), pub_merged.getJournal().getConferencedate());
assertEquals(pub_top.getJournal().getConferenceplace(), pub_merged.getJournal().getConferenceplace());
assertEquals("OPEN", pub_merged.getBestaccessright().getClassid());
- assertEquals(pub_top.getResulttype(), pub_merged.getResulttype());
+ assertEquals(pub_top.getResulttype().getClassid(), pub_merged.getResulttype().getClassid());
- assertEquals(pub_top.getLanguage(), pub_merged.getLanguage());
+ assertEquals(pub_top.getLanguage().getClassid(), pub_merged.getLanguage().getClassid());
- assertEquals(pub_top.getPublisher(), pub_merged.getPublisher());
+ assertEquals("Elsevier BV", pub_merged.getPublisher().getValue());
- assertEquals(pub_top.getEmbargoenddate(), pub_merged.getEmbargoenddate());
+ assertEquals(pub_top.getEmbargoenddate().getValue(), pub_merged.getEmbargoenddate().getValue());
assertEquals(pub_top.getResourcetype().getClassid(), "");
assertEquals(pub_top.getDateoftransformation(), pub_merged.getDateoftransformation());
assertEquals(pub_top.getOaiprovenance(), pub_merged.getOaiprovenance());
- assertEquals(pub_top.getDateofcollection(), pub_merged.getDateofcollection());
+ // assertEquals(pub_top.getDateofcollection(), pub_merged.getDateofcollection());
assertEquals(3, pub_merged.getInstance().size());
assertEquals(2, pub_merged.getCountry().size());
assertEquals(0, pub_merged.getSubject().size());
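Note: the assertions above now compare classid and value fields instead of whole schema beans, presumably because those beans do not define structural equals(). A tiny illustration with a hypothetical Qualifier stand-in:

public class FieldEqualitySketch {

	// stand-in for a schema bean that does not override equals()
	static class Qualifier {
		String classid;
		Qualifier(String classid) { this.classid = classid; }
	}

	public static void main(String[] args) {
		Qualifier a = new Qualifier("publication");
		Qualifier b = new Qualifier("publication");

		// object identity: prints false, even though both carry the same classid
		System.out.println(a.equals(b));

		// comparing the carried value is what the updated assertions do: prints true
		System.out.println(a.classid.equals(b.classid));
	}
}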
@ -49,7 +49,7 @@
},
{
"field": "country",
- "comparator": "exactMatch",
+ "comparator": "countryMatch",
"weight": 1,
"countIfUndefined": "true",
"params": {}
File diff suppressed because one or more lines are too long
|
@ -73,12 +73,6 @@
|
||||||
"name": "Irish Nephrology Society",
|
"name": "Irish Nephrology Society",
|
||||||
"synonym": []
|
"synonym": []
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"id": "100011062",
|
|
||||||
"uri": "http://dx.doi.org/10.13039/100011062",
|
|
||||||
"name": "Asian Spinal Cord Network",
|
|
||||||
"synonym": []
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"id": "100011096",
|
"id": "100011096",
|
||||||
"uri": "http://dx.doi.org/10.13039/100011096",
|
"uri": "http://dx.doi.org/10.13039/100011096",
|
||||||
|
@ -223,12 +217,6 @@
|
||||||
"name": "Global Brain Health Institute",
|
"name": "Global Brain Health Institute",
|
||||||
"synonym": []
|
"synonym": []
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"id": "100015776",
|
|
||||||
"uri": "http://dx.doi.org/10.13039/100015776",
|
|
||||||
"name": "Health and Social Care Board",
|
|
||||||
"synonym": []
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"id": "100015992",
|
"id": "100015992",
|
||||||
"uri": "http://dx.doi.org/10.13039/100015992",
|
"uri": "http://dx.doi.org/10.13039/100015992",
|
||||||
|
@ -403,18 +391,6 @@
|
||||||
"name": "Irish Hospice Foundation",
|
"name": "Irish Hospice Foundation",
|
||||||
"synonym": []
|
"synonym": []
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"id": "501100001596",
|
|
||||||
"uri": "http://dx.doi.org/10.13039/501100001596",
|
|
||||||
"name": "Irish Research Council for Science, Engineering and Technology",
|
|
||||||
"synonym": []
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "501100001597",
|
|
||||||
"uri": "http://dx.doi.org/10.13039/501100001597",
|
|
||||||
"name": "Irish Research Council for the Humanities and Social Sciences",
|
|
||||||
"synonym": []
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"id": "501100001598",
|
"id": "501100001598",
|
||||||
"uri": "http://dx.doi.org/10.13039/501100001598",
|
"uri": "http://dx.doi.org/10.13039/501100001598",
|
||||||
|
@ -515,7 +491,7 @@
|
||||||
"id": "501100002081",
|
"id": "501100002081",
|
||||||
"uri": "http://dx.doi.org/10.13039/501100002081",
|
"uri": "http://dx.doi.org/10.13039/501100002081",
|
||||||
"name": "Irish Research Council",
|
"name": "Irish Research Council",
|
||||||
"synonym": []
|
"synonym": ["501100001596", "501100001597"]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "501100002736",
|
"id": "501100002736",
|
||||||
|
|
|
@ -560,7 +560,15 @@ case object Crossref2Oaf {
"10.13039/501100000266" | "10.13039/501100006041" | "10.13039/501100000265" | "10.13039/501100000270" |
"10.13039/501100013589" | "10.13039/501100000271" =>
generateSimpleRelationFromAward(funder, "ukri________", a => a)
+ //HFRI
+ case "10.13039/501100013209" =>
+ 	generateSimpleRelationFromAward(funder, "hfri________", a => a)
+ 	val targetId = getProjectId("hfri________", "1e5e62235d094afd01cd56e65112fc63")
+ 	queue += generateRelation(sourceId, targetId, ModelConstants.IS_PRODUCED_BY)
+ 	queue += generateRelation(targetId, sourceId, ModelConstants.PRODUCES)
+ //ERASMUS+
+ case "10.13039/501100010790" =>
+ 	generateSimpleRelationFromAward(funder, "erasmusplus_", a => a)
case _ => logger.debug("no match for " + funder.DOI.get)

}
@ -172,7 +172,7 @@ public class SparkBulkTagJob {
.option("compression", "gzip")
.json(outputPath + "project");

- readPath(spark, outputPath + "project", Datasource.class)
+ readPath(spark, outputPath + "project", Project.class)
.write()
.mode(SaveMode.Overwrite)
.option("compression", "gzip")
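Note: the change above fixes the bean class used to re-read the just-written project output; with Datasource.class the records would have been bound to the wrong bean type. A sketch of how such a readPath helper typically looks, to show why the class parameter matters (this is an approximation, not the project's exact code):

import com.fasterxml.jackson.databind.ObjectMapper;

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

public class ReadPathSketch {

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	// the clazz argument drives both the JSON deserialization target and the Dataset encoder,
	// so passing the wrong class yields beans whose fields do not match the stored records
	public static <R> Dataset<R> readPath(SparkSession spark, String inputPath, Class<R> clazz) {
		return spark
			.read()
			.textFile(inputPath)
			.map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
	}
}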
@ -53,6 +53,8 @@ public class Constraints implements Serializable {

for (Constraint sc : constraint) {
boolean verified = false;
+ if (!param.containsKey(sc.getField()))
+ 	return false;
for (String value : param.get(sc.getField())) {
if (sc.verifyCriteria(value.trim())) {
verified = true;
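Note: the added guard makes a constraint fail cleanly when the examined record carries no value at all for the constrained field; without it, param.get(sc.getField()) would return null and the loop would throw. A minimal sketch of the behaviour, with illustrative field names and a simplified helper:

import java.util.List;
import java.util.Map;

public class ConstraintGuardSketch {

	// illustrative stand-in for verifying one field constraint against a map of values
	static boolean satisfies(Map<String, List<String>> param, String field, String expected) {
		// without this guard, param.get(field) would be null and the loop below would throw
		if (!param.containsKey(field))
			return false;
		for (String value : param.get(field)) {
			if (expected.equalsIgnoreCase(value.trim()))
				return true;
		}
		return false;
	}

	public static void main(String[] args) {
		Map<String, List<String>> param = Map.of("subject", List.of("energy", "climate"));
		System.out.println(satisfies(param, "subject", "energy")); // true
		System.out.println(satisfies(param, "fos", "energy")); // false instead of a NullPointerException
	}
}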
@@ -14,6 +14,7 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.function.MapFunction;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.SparkSession;
 import org.slf4j.Logger;
@@ -84,19 +85,26 @@ public class SparkCountryPropagationJob {
         Dataset<R> res = readPath(spark, sourcePath, resultClazz);

         log.info("Reading prepared info: {}", preparedInfoPath);
-        Dataset<ResultCountrySet> prepared = spark
+        final Dataset<Row> preparedInfoRaw = spark
             .read()
-            .json(preparedInfoPath)
-            .as(Encoders.bean(ResultCountrySet.class));
-
-        res
-            .joinWith(prepared, res.col("id").equalTo(prepared.col("resultId")), "left_outer")
-            .map(getCountryMergeFn(), Encoders.bean(resultClazz))
-            .write()
-            .option("compression", "gzip")
-            .mode(SaveMode.Overwrite)
-            .json(outputPath);
+            .json(preparedInfoPath);
+
+        if (!preparedInfoRaw.isEmpty()) {
+            final Dataset<ResultCountrySet> prepared = preparedInfoRaw.as(Encoders.bean(ResultCountrySet.class));
+            res
+                .joinWith(prepared, res.col("id").equalTo(prepared.col("resultId")), "left_outer")
+                .map(getCountryMergeFn(), Encoders.bean(resultClazz))
+                .write()
+                .option("compression", "gzip")
+                .mode(SaveMode.Overwrite)
+                .json(outputPath);
+        } else {
+            res
+                .write()
+                .option("compression", "gzip")
+                .mode(SaveMode.Overwrite)
+                .json(outputPath);
+        }
     }

     private static <R extends Result> MapFunction<Tuple2<R, ResultCountrySet>, R> getCountryMergeFn() {
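Editor's note: the rewritten block above first reads the prepared-info JSON untyped and only applies the bean encoder and the join when rows are present; with an empty preparedInfoPath the typed .as(Encoders.bean(...)) step is skipped and the results are written through unchanged. A stand-alone sketch of that guard, assuming a local Spark session and a made-up Person bean and path:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

// Illustrative only: convert to a typed Dataset only when the untyped read is non-empty.
public class EmptyJsonGuardSketch {

    public static class Person {
        private String name;
        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
    }

    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
            .appName("empty-json-guard")
            .master("local[*]")
            .getOrCreate();

        // Hypothetical path; an empty input yields an empty schema, and forcing
        // the bean encoder on it is exactly what the guarded branch avoids.
        Dataset<Row> raw = spark.read().json("/tmp/prepared_info");

        if (!raw.isEmpty()) {
            Dataset<Person> typed = raw.as(Encoders.bean(Person.class));
            typed.show();
        } else {
            System.out.println("prepared info is empty, skipping the typed join");
        }

        spark.stop();
    }
}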
File diff suppressed because it is too large
@@ -147,6 +147,7 @@ public class CleanGraphSparkJob {
             .map((MapFunction<T, T>) GraphCleaningFunctions::fixVocabularyNames, Encoders.bean(clazz))
             .map((MapFunction<T, T>) value -> OafCleaner.apply(value, mapping), Encoders.bean(clazz))
             .map((MapFunction<T, T>) value -> GraphCleaningFunctions.cleanup(value, vocs), Encoders.bean(clazz))
+            .map((MapFunction<T, T>) GraphCleaningFunctions::dedicatedUglyHacks, Encoders.bean(clazz))
             .filter((FilterFunction<T>) GraphCleaningFunctions::filter);

         // read the master-duplicate tuples
@@ -50,7 +50,7 @@ public class CleaningRuleMap extends HashMap<Class<?>, SerializableConsumer<Obje
      * @param subject
      */
     private static void cleanSubjectForVocabulary(String vocabularyId, VocabularyGroup vocabularies,
         Subject subject) {

         vocabularies.find(vocabularyId).ifPresent(vocabulary -> {
             if (ModelConstants.DNET_SUBJECT_KEYWORD.equalsIgnoreCase(subject.getQualifier().getClassid())) {
@@ -61,13 +61,14 @@ public class CleaningRuleMap extends HashMap<Class<?>, SerializableConsumer<Obje
                     subject.getQualifier().setClassname(vocabulary.getName());
                 }
             } else {
-                final String provenanceActionClassId = Optional.ofNullable(subject.getDataInfo())
-                    .map(DataInfo::getProvenanceaction)
-                    .map(Qualifier::getClassid)
-                    .orElse(null);
+                final String provenanceActionClassId = Optional
+                    .ofNullable(subject.getDataInfo())
+                    .map(DataInfo::getProvenanceaction)
+                    .map(Qualifier::getClassid)
+                    .orElse(null);

                 if (vocabularyId.equals(subject.getQualifier().getClassid()) &&
                     !"subject:fos".equals(provenanceActionClassId)) {

                     Qualifier syn = vocabulary.getSynonymAsQualifier(subject.getValue());
                     VocabularyTerm term = vocabulary.getTerm(subject.getValue());
@@ -398,6 +398,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
         o.setEcsmevalidated(field(Boolean.toString(rs.getBoolean("ecsmevalidated")), info));
         o.setEcnutscode(field(Boolean.toString(rs.getBoolean("ecnutscode")), info));
         o.setCountry(prepareQualifierSplitting(rs.getString("country")));
+        o.setOrganizationType(Organization.OrganizationType.valueOf(rs.getString("typology")));
         o.setDataInfo(info);
         o.setLastupdatetimestamp(lastUpdateTimestamp);

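Editor's note: the added setOrganizationType call feeds the typology column straight into Enum.valueOf, which throws IllegalArgumentException if the database ever holds a string with no matching constant. A hedged sketch of a more tolerant variant (the OrgType enum and the UNKNOWN fallback are invented for illustration, not part of the dhp schema):

import java.util.Optional;

// Illustrative only: tolerant enum parsing for a value read from a database column.
public class EnumFromDbSketch {

    enum OrgType { ARCHIVE, COMPANY, EDUCATION, GOVERNMENT, NONPROFIT, OTHER, UNKNOWN }

    // Returns UNKNOWN instead of throwing when the stored string has no matching constant.
    static OrgType parseTypology(String raw) {
        return Optional.ofNullable(raw)
            .map(String::trim)
            .map(String::toUpperCase)
            .map(s -> {
                try {
                    return OrgType.valueOf(s);
                } catch (IllegalArgumentException e) {
                    return OrgType.UNKNOWN;
                }
            })
            .orElse(OrgType.UNKNOWN);
    }

    public static void main(String[] args) {
        System.out.println(parseTypology("education"));  // EDUCATION
        System.out.println(parseTypology("consortium")); // UNKNOWN
        System.out.println(parseTypology(null));         // UNKNOWN
    }
}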
@@ -156,6 +156,7 @@
     --executor-cores=${sparkExecutorCores}
     --executor-memory=${sparkExecutorMemory}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
@@ -190,6 +191,7 @@
     --executor-cores=${sparkExecutorCores}
     --executor-memory=${sparkExecutorMemory}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
@@ -224,6 +226,7 @@
     --executor-cores=${sparkExecutorCores}
     --executor-memory=${sparkExecutorMemory}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
@@ -258,6 +261,7 @@
     --executor-cores=${sparkExecutorCores}
     --executor-memory=${sparkExecutorMemory}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
@@ -292,6 +296,7 @@
     --executor-cores=${sparkExecutorCores}
     --executor-memory=${sparkExecutorMemory}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
@@ -326,6 +331,7 @@
     --executor-cores=${sparkExecutorCores}
     --executor-memory=${sparkExecutorMemory}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
@@ -360,6 +366,7 @@
     --executor-cores=${sparkExecutorCores}
     --executor-memory=${sparkExecutorMemory}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
@@ -394,6 +401,7 @@
     --executor-cores=${sparkExecutorCores}
     --executor-memory=${sparkExecutorMemory}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
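Editor's note: the eight hunks above all add the same line to the <spark-opts> of each action, reserving executor memory overhead equal to ${sparkExecutorMemory} on top of the executor heap. A hedged Java sketch of the programmatic equivalent, assuming you were building the session by hand rather than through Oozie:

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

// Illustrative only: setting the same configuration in code instead of in <spark-opts>.
public class MemoryOverheadConfSketch {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf()
            .setAppName("memory-overhead-sketch")
            .setMaster("local[*]")                 // stand-in; the real jobs run on YARN
            .set("spark.executor.memory", "5g")
            // Off-heap headroom for the executor container; the workflow reuses
            // ${sparkExecutorMemory} here, i.e. overhead equals the heap size.
            .set("spark.executor.memoryOverhead", "5g");

        SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
        System.out.println(spark.conf().get("spark.executor.memoryOverhead"));
        spark.stop();
    }
}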
@@ -116,17 +116,19 @@
     --executor-memory=${sparkExecutorMemory}
     --executor-cores=${sparkExecutorCores}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
     --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
     --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+    --conf spark.sql.shuffle.partitions=10000
     </spark-opts>
     <arg>--inputPath</arg><arg>${inputPath}/publication</arg>
     <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
     <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Publication</arg>
     <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
-    <arg>--numPartitions</arg><arg>8000</arg>
+    <arg>--numPartitions</arg><arg>10000</arg>
     </spark>
     <ok to="join_import"/>
     <error to="Kill"/>
@@ -143,17 +145,19 @@
     --executor-memory=${sparkExecutorMemory}
     --executor-cores=${sparkExecutorCores}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
     --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
     --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+    --conf spark.sql.shuffle.partitions=4000
     </spark-opts>
     <arg>--inputPath</arg><arg>${inputPath}/dataset</arg>
     <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
     <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Dataset</arg>
     <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
-    <arg>--numPartitions</arg><arg>4000</arg>
+    <arg>--numPartitions</arg><arg>8000</arg>
     </spark>
     <ok to="join_import"/>
     <error to="Kill"/>
@@ -170,11 +174,13 @@
     --executor-memory=${sparkExecutorMemory}
     --executor-cores=${sparkExecutorCores}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
     --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
     --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+    --conf spark.sql.shuffle.partitions=8000
     </spark-opts>
     <arg>--inputPath</arg><arg>${inputPath}/otherresearchproduct</arg>
     <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
@@ -197,17 +203,19 @@
     --executor-memory=${sparkExecutorMemory}
     --executor-cores=${sparkExecutorCores}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
     --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
     --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+    --conf spark.sql.shuffle.partitions=1000
     </spark-opts>
     <arg>--inputPath</arg><arg>${inputPath}/software</arg>
     <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
     <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Software</arg>
     <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
-    <arg>--numPartitions</arg><arg>300</arg>
+    <arg>--numPartitions</arg><arg>1000</arg>
     </spark>
     <ok to="join_import"/>
     <error to="Kill"/>
@@ -224,17 +232,19 @@
     --executor-memory=${sparkExecutorMemory}
     --executor-cores=${sparkExecutorCores}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
     --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
     --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+    --conf spark.sql.shuffle.partitions=200
     </spark-opts>
     <arg>--inputPath</arg><arg>${inputPath}/datasource</arg>
     <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
     <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Datasource</arg>
     <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
-    <arg>--numPartitions</arg><arg>100</arg>
+    <arg>--numPartitions</arg><arg>200</arg>
     </spark>
     <ok to="join_import"/>
     <error to="Kill"/>
@@ -251,17 +261,19 @@
     --executor-memory=${sparkExecutorMemory}
     --executor-cores=${sparkExecutorCores}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
     --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
     --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+    --conf spark.sql.shuffle.partitions=1000
     </spark-opts>
     <arg>--inputPath</arg><arg>${inputPath}/organization</arg>
     <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
     <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Organization</arg>
     <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
-    <arg>--numPartitions</arg><arg>400</arg>
+    <arg>--numPartitions</arg><arg>1000</arg>
     </spark>
     <ok to="join_import"/>
     <error to="Kill"/>
@@ -278,17 +290,19 @@
     --executor-memory=${sparkExecutorMemory}
     --executor-cores=${sparkExecutorCores}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
     --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
     --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+    --conf spark.sql.shuffle.partitions=1000
     </spark-opts>
     <arg>--inputPath</arg><arg>${inputPath}/project</arg>
     <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
     <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Project</arg>
     <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
-    <arg>--numPartitions</arg><arg>100</arg>
+    <arg>--numPartitions</arg><arg>1000</arg>
     </spark>
     <ok to="join_import"/>
     <error to="Kill"/>
@@ -305,17 +319,19 @@
     --executor-memory=${sparkExecutorMemory}
     --executor-cores=${sparkExecutorCores}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
     --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
     --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+    --conf spark.sql.shuffle.partitions=15000
     </spark-opts>
     <arg>--inputPath</arg><arg>${inputPath}/relation</arg>
     <arg>--hiveDbName</arg><arg>${hiveDbName}</arg>
     <arg>--className</arg><arg>eu.dnetlib.dhp.schema.oaf.Relation</arg>
     <arg>--hiveMetastoreUris</arg><arg>${hiveMetastoreUris}</arg>
-    <arg>--numPartitions</arg><arg>10000</arg>
+    <arg>--numPartitions</arg><arg>15000</arg>
     </spark>
     <ok to="join_import"/>
     <error to="Kill"/>
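Editor's note: in the Hive-import workflow above each action now pins spark.sql.shuffle.partitions and raises its --numPartitions argument to the same value (publication 10000, dataset 8000, software 1000, datasource 200, organization 1000, project 1000, relation 15000), so the parallelism used for shuffles matches the partitioning of the written output. A hedged sketch of that idea, with made-up paths and a parquet sink standing in for the actual Hive table write:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

// Illustrative only: keep the shuffle parallelism and the output partition count aligned.
public class ImportPartitionSketch {
    public static void main(String[] args) {
        int numPartitions = args.length > 0 ? Integer.parseInt(args[0]) : 200;

        SparkSession spark = SparkSession.builder()
            .appName("import-partition-sketch")
            .master("local[*]")
            .config("spark.sql.shuffle.partitions", String.valueOf(numPartitions))
            .getOrCreate();

        // Hypothetical input; the real workflow reads ${inputPath}/<entity> as JSON.
        Dataset<Row> input = spark.read().json("/tmp/graph/datasource");

        // Writing with the same partition count avoids a handful of huge files
        // or thousands of tiny ones when the two settings drift apart.
        input
            .repartition(numPartitions)
            .write()
            .mode(SaveMode.Overwrite)
            .parquet("/tmp/graph_db/datasource");

        spark.stop();
    }
}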
@@ -85,7 +85,7 @@
     <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>

     <fork name="fork_downloads_csv">
         <path start="download_gold"/>
         <path start="download_doaj_json"/>
     </fork>
@@ -223,11 +223,13 @@
     --executor-memory=${sparkExecutorMemory}
     --executor-cores=${sparkExecutorCores}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
     --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
     --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+    --conf spark.sql.shuffle.partitions=15000
     </spark-opts>
     <arg>--hostedByMapPath</arg><arg>${hostedByMapPath}</arg>
     <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
@@ -253,11 +255,13 @@
     --executor-memory=${sparkExecutorMemory}
     --executor-cores=${sparkExecutorCores}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
     --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
     --conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+    --conf spark.sql.shuffle.partitions=15000
     </spark-opts>
     <arg>--outputPath</arg><arg>${outputPath}/publication</arg>
     <arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
@@ -278,6 +282,7 @@
     --executor-memory=${sparkExecutorMemory}
     --executor-cores=${sparkExecutorCores}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
     --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
@@ -45,6 +45,7 @@
     --executor-memory=${sparkExecutorMemory}
     --executor-cores=${sparkExecutorCores}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.shuffle.partitions=15000
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
@@ -79,6 +80,7 @@
     --executor-memory=${sparkExecutorMemory}
     --executor-cores=${sparkExecutorCores}
     --driver-memory=${sparkDriverMemory}
+    --conf spark.executor.memoryOverhead=${sparkExecutorMemory}
     --conf spark.extraListeners=${spark2ExtraListeners}
     --conf spark.sql.shuffle.partitions=10000
     --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
Some files were not shown because too many files have changed in this diff.