Compare commits: affiliatio ... main

362 Commits
Author | SHA1 | Date |
---|---|---|
Miriam Baglioni | ed560dacc0 | |
Miriam Baglioni | 07a1f2b31c | |
Claudio Atzori | 80d7b842e4 | |
Claudio Atzori | dd397d107d | |
Giambattista Bloisi | 3152382ae8 | |
Claudio Atzori | a50e04154e | |
Claudio Atzori | c4e8aaca1f | |
Claudio Atzori | 1596d70224 | |
Claudio Atzori | 5d030d1118 | |
Claudio Atzori | 6e0b6a886f | |
Claudio Atzori | 3854fcc5e0 | |
Miriam Baglioni | 371154d74f | |
Claudio Atzori | 4e9f64e01a | |
Giambattista Bloisi | d175a9745f | |
Michele De Bonis | fe70caa33c | |
Claudio Atzori | 81bfe3fe32 | |
Miriam Baglioni | 0765641979 | |
Miriam Baglioni | d0eba032cd | |
Miriam Baglioni | 7cd8171268 | |
Miriam Baglioni | a54d021c37 | |
Miriam Baglioni | 6eea075324 | |
Claudio Atzori | 2ba67f08d3 | |
Miriam Baglioni | df39360822 | |
Claudio Atzori | c1a309df75 | |
Claudio Atzori | 5fdc286eb9 | |
Claudio Atzori | e7f6eb82df | |
Claudio Atzori | 9c7711310e | |
Michele Artini | 0c66b8589d | |
Michele Artini | 758d4acd05 | |
Sandro La Bruzzo | 890190b7ae | |
Claudio Atzori | 24b5dc97c6 | |
Claudio Atzori | c648531ccb | |
Giambattista Bloisi | 10cad80d4d | |
Giambattista Bloisi | 37b9bdc10c | |
Giambattista Bloisi | e7150eea7b | |
Giambattista Bloisi | 23477f3e80 | |
Claudio Atzori | ce78752aa3 | |
Claudio Atzori | 152cb47375 | |
Miriam Baglioni | f1dc0050c7 | |
Miriam Baglioni | 42531afc3e | |
Miriam Baglioni | 907eeadce8 | |
Claudio Atzori | 6b4fa7b8b9 | |
Claudio Atzori | b8bc237079 | |
Claudio Atzori | ed6d71fc70 | |
Miriam Baglioni | cbe877b73c | |
Claudio Atzori | 5fc413a5df | |
Claudio Atzori | 97c9706469 | |
Claudio Atzori | 07e7b9315c | |
Alessia | 39810c6e7e | |
Claudio Atzori | e0f58afd30 | |
Claudio Atzori | 60cf7d86a1 | |
Miriam Baglioni | 8f11dfe554 | |
Claudio Atzori | d20a5e020a | |
Claudio Atzori | 3d1d8e6036 | |
Claudio Atzori | 0b1c58358b | |
Claudio Atzori | b70a440aca | |
Michele Artini | 36c3df1652 | |
Claudio Atzori | 2f13683285 | |
Claudio Atzori | 5ab409dcab | |
Claudio Atzori | b756cfeb85 | |
Claudio Atzori | 51d6a541bd | |
Claudio Atzori | 07ce92cef2 | |
Miriam Baglioni | f043b7b096 | |
Claudio Atzori | 153b56eeff | |
Claudio Atzori | ed97ba4565 | |
Claudio Atzori | 7b398a6d0b | |
Claudio Atzori | 13f6506ce5 | |
Claudio Atzori | 3d9ddaa23a | |
Claudio Atzori | c06dfdfd86 | |
Claudio Atzori | b822b34abe | |
Michele De Bonis | ea1841fbd2 | |
Miriam Baglioni | 4dbce39237 | |
Miriam Baglioni | 3ee8a7d18a | |
Claudio Atzori | ee7deb3f60 | |
Claudio Atzori | 157cc8be87 | |
Claudio Atzori | 023099a921 | |
Claudio Atzori | 786c217085 | |
Lampros Smyrnaios | c858c02111 | |
Claudio Atzori | 8220e27110 | |
Claudio Atzori | bc993d49c1 | |
Claudio Atzori | 1dc7458de2 | |
Claudio Atzori | a7a54aab47 | |
Miriam Baglioni | eaa00a4199 | |
Claudio Atzori | fb731b6d46 | |
Miriam Baglioni | b6da35e736 | |
Lampros Smyrnaios | 3c9b8de892 | |
Antonis Lempesis | c67ef157d3 | |
Lampros Smyrnaios | c23f3031ed | |
Claudio Atzori | 8ec151aa3d | |
Claudio Atzori | 2636936162 | |
Miriam Baglioni | ef437a8cdf | |
Miriam Baglioni | 86088ef26e | |
Miriam Baglioni | 143c525343 | |
Claudio Atzori | c371513d43 | |
Claudio Atzori | 71927ca818 | |
Giambattista Bloisi | 46018dc804 | |
Miriam Baglioni | 3efd5b1308 | |
Miriam Baglioni | 196fa55774 | |
Miriam Baglioni | 50805e3fc1 | |
Claudio Atzori | d39a1054b8 | |
Claudio Atzori | 576efc1857 | |
Claudio Atzori | efc1632e16 | |
Claudio Atzori | 91b49366c6 | |
Claudio Atzori | 5e05385d35 | |
Miriam Baglioni | c4d9b5b9d2 | |
Miriam Baglioni | bf9a5e6314 | |
Miriam Baglioni | 9d79ddb3dd | |
Miriam Baglioni | 907aa28c6c | |
Miriam Baglioni | 3955ceaa76 | |
Miriam Baglioni | 128c143394 | |
Claudio Atzori | 5133993ee5 | |
Claudio Atzori | 5cf259a851 | |
Claudio Atzori | e1828fc60e | |
Claudio Atzori | 56920b447d | |
Giambattista Bloisi | 3feab5d92d | |
Claudio Atzori | 6be783caec | |
Claudio Atzori | b703f94f09 | |
Miriam Baglioni | 14f275ffaf | |
Claudio Atzori | a428e7be7e | |
Claudio Atzori | 8e45c5baa8 | |
Claudio Atzori | db5e18c784 | |
Claudio Atzori | fb266efbcb | |
Claudio Atzori | d7daf54333 | |
Claudio Atzori | f99eaa0376 | |
Claudio Atzori | 23312fcc1e | |
Miriam Baglioni | b864f0adcf | |
Miriam Baglioni | 7a44869d87 | |
Miriam Baglioni | 12ffde023f | |
Claudio Atzori | c3fe59bc78 | |
Claudio Atzori | 795e1b2629 | |
Claudio Atzori | 0c05abe50b | |
Claudio Atzori | 8fdd0244ad | |
Claudio Atzori | 18fdaaf548 | |
Claudio Atzori | 43e123c624 | |
Claudio Atzori | 62a07b7add | |
Claudio Atzori | 96bddcc921 | |
Miriam Baglioni | 0486cea4c4 | |
Claudio Atzori | 013935c593 | |
Claudio Atzori | 6132bd028e | |
Miriam Baglioni | 519db1ddef | |
Claudio Atzori | 5add51f38c | |
Claudio Atzori | f01390702e | |
Claudio Atzori | 5592ccc37a | |
Claudio Atzori | d16c15da8d | |
Claudio Atzori | 09a6d17059 | |
Claudio Atzori | d70793847d | |
Michele De Bonis | f6601ea7d1 | |
Michele De Bonis | cd4c3c934d | |
Michele Artini | a99942f7cf | |
Michele Artini | 7f7083f53e | |
Michele Artini | d9b23a76c5 | |
Michele Artini | 841ca92246 | |
Michele Artini | 3bcfc40293 | |
Giambattista Bloisi | 3067ea390d | |
Miriam Baglioni | c94d94035c | |
Michele Artini | 4374d7449e | |
Claudio Atzori | 07d009007b | |
Claudio Atzori | 071d044971 | |
Claudio Atzori | b3ddbaed58 | |
Claudio Atzori | 1416f16b35 | |
Giambattista Bloisi | ba1a0e7b4f | |
Giambattista Bloisi | 079085286c | |
Giambattista Bloisi | 8dd666aedd | |
Claudio Atzori | f21133229a | |
Claudio Atzori | d86b909db2 | |
Claudio Atzori | 08162902ab | |
Claudio Atzori | e8630a6d03 | |
Claudio Atzori | f28c63d5ef | |
Claudio Atzori | 1a8b609ed2 | |
Miriam Baglioni | 4c8706efee | |
Claudio Atzori | 4d0c59669b | |
Sandro La Bruzzo | 3c8c88bdd3 | |
Claudio Atzori | 106968adaa | |
Claudio Atzori | a8a4db96f0 | |
Sandro La Bruzzo | 37e36baf76 | |
Sandro La Bruzzo | 9d39845d1f | |
Sandro La Bruzzo | 1fbd4325f5 | |
Sandro La Bruzzo | 1f1a6a5f5f | |
Claudio Atzori | c4ec35b6cd | |
Claudio Atzori | 1726f49790 | |
Claudio Atzori | 1763d377ad | |
Claudio Atzori | a0311e8a90 | |
Claudio Atzori | 8fb05888fd | |
Claudio Atzori | 2b626815ff | |
Miriam Baglioni | b177cd5a0a | |
Serafeim Chatzopoulos | 671ba8a5a7 | |
Claudio Atzori | 5f1ed61c1f | |
Claudio Atzori | 8c03c41d5d | |
Claudio Atzori | 97454e9594 | |
Serafeim Chatzopoulos | 7e34dde774 | |
Serafeim Chatzopoulos | 24c3f92d87 | |
Serafeim Chatzopoulos | 6ce9b600c1 | |
Serafeim Chatzopoulos | 94089878fd | |
Miriam Baglioni | 0097f4e64b | |
Miriam Baglioni | 5c5a195e97 | |
Miriam Baglioni | 70b78a40c7 | |
Miriam Baglioni | f206ff42d6 | |
Miriam Baglioni | 34358afe75 | |
Miriam Baglioni | 18bfff8af3 | |
Miriam Baglioni | 69dac91659 | |
Miriam Baglioni | a9ede1e989 | |
Claudio Atzori | 242d647146 | |
Claudio Atzori | af3ffad6c4 | |
Claudio Atzori | ba5475ed4c | |
Giambattista Bloisi | 2c235e82ad | |
Claudio Atzori | 4ac06c9e37 | |
Claudio Atzori | fa692b3629 | |
Claudio Atzori | ef02648399 | |
Claudio Atzori | d13bb534f0 | |
Giambattista Bloisi | 775c3f704a | |
Sandro La Bruzzo | 9c3ab11d5b | |
Sandro La Bruzzo | 423ef30676 | |
Giambattista Bloisi | 7152d47f84 | |
Claudio Atzori | 4853c19b5e | |
Giambattista Bloisi | 1f226d1dce | |
Alessia Bardi | 6186cdc2cc | |
Alessia Bardi | d94b9bebf7 | |
Alessia Bardi | 19abba8fa7 | |
Claudio Atzori | c2f179800c | |
Serafeim Chatzopoulos | 2aed5a74be | |
Claudio Atzori | 4dc4862011 | |
Claudio Atzori | dc80ab14d3 | |
Alessia Bardi | 77a2199837 | |
Claudio Atzori | 265180bfd2 | |
Claudio Atzori | da0e9828f7 | |
Miriam Baglioni | 599828ce35 | |
Claudio Atzori | 0bc74e2000 | |
Claudio Atzori | 7180911ded | |
Claudio Atzori | da1727f93f | |
Claudio Atzori | ccac6a7f75 | |
Claudio Atzori | d512df8612 | |
Claudio Atzori | 59764145bb | |
Miriam Baglioni | 9e8e39f78a | |
Claudio Atzori | 373a5f2c83 | |
Claudio Atzori | 8af129b0c7 | |
dimitrispie | 706092bc19 | |
dimitrispie | aedd279f78 | |
Miriam Baglioni | 8dcd028eed | |
Miriam Baglioni | 8621377917 | |
Miriam Baglioni | ef2dd7a980 | |
Claudio Atzori | f3a85e224b | |
Claudio Atzori | 4ef0f2ec26 | |
Claudio Atzori | 288ec0b7d6 | |
Claudio Atzori | 5f32edd9bf | |
Claudio Atzori | e10ce92fe5 | |
Claudio Atzori | b93e1541aa | |
Claudio Atzori | d029bf0b94 | |
Michele Artini | 009d7f312f | |
Miriam Baglioni | e4b27182d0 | |
Giambattista Bloisi | 758e662ab8 | |
Giambattista Bloisi | 485f9d18cb | |
Michele Artini | a92206dab5 | |
Miriam Baglioni | d9506035e4 | |
Alessia Bardi | 118e72d7db | |
Alessia Bardi | 5befd93d7d | |
Michele Artini | cae92cf811 | |
Miriam Baglioni | b64a5eb4a5 | |
Claudio Atzori | 654ffcba60 | |
Claudio Atzori | db625e548d | |
Alessia Bardi | 04141fe259 | |
Alessia Bardi | b88f009d9f | |
Alessia Bardi | 5ffe82ffd8 | |
Alessia Bardi | 1c173642f0 | |
Alessia Bardi | 382f46a8e4 | |
Miriam Baglioni | 9fc8ebe98b | |
Miriam Baglioni | 24c41806ac | |
Miriam Baglioni | 087b5a7973 | |
Claudio Atzori | 688e3b7936 | |
Claudio Atzori | 2e465915b4 | |
Claudio Atzori | 4a4ca634f0 | |
Miriam Baglioni | c6a7602b3e | |
Miriam Baglioni | 831055a1fc | |
Miriam Baglioni | cf3d0f4f83 | |
Claudio Atzori | 4f67225fbc | |
Claudio Atzori | e093f04874 | |
Miriam Baglioni | c5a9f39141 | |
Miriam Baglioni | ecc05fe0f3 | |
Claudio Atzori | 42442ccd39 | |
Miriam Baglioni | 9a9cc6a1dd | |
Michele Artini | 200098b683 | |
Michele Artini | 9c1df15071 | |
Miriam Baglioni | 32870339f5 | |
Miriam Baglioni | 7184cc0804 | |
Miriam Baglioni | 7473093c84 | |
Miriam Baglioni | 5f0906be60 | |
Claudio Atzori | 1b37516578 | |
Claudio Atzori | c1e2460293 | |
Claudio Atzori | 3800361033 | |
Michele Artini | 699736addc | |
Claudio Atzori | f86e19b282 | |
Michele Artini | d40e20f437 | |
Michele Artini | 4953ae5649 | |
Miriam Baglioni | c60d3a2b46 | |
Claudio Atzori | 7becdaf31d | |
Miriam Baglioni | b713132db7 | |
Miriam Baglioni | 11f2b470d3 | |
Sandro La Bruzzo | 91c70b15a5 | |
Claudio Atzori | f910b7379d | |
Claudio Atzori | 33bdad104e | |
Claudio Atzori | 5816ded93f | |
Claudio Atzori | 46972f8393 | |
Claudio Atzori | da85ca697d | |
Miriam Baglioni | 059e100ec7 | |
Miriam Baglioni | fc95a550c3 | |
Miriam Baglioni | 6901ac91b1 | |
Claudio Atzori | 08c4588d47 | |
Miriam Baglioni | 29d3da85f1 | |
Miriam Baglioni | 33a2b1b5dc | |
Miriam Baglioni | c6df8327b3 | |
Miriam Baglioni | 935aa367d8 | |
Miriam Baglioni | 43aedbdfe5 | |
Miriam Baglioni | b6da9b67ff | |
Claudio Atzori | a34c8b6f81 | |
Miriam Baglioni | 122e75aa17 | |
Miriam Baglioni | cee7a45b1d | |
Claudio Atzori | ed64618235 | |
Claudio Atzori | 8742934843 | |
Claudio Atzori | 13cc592f39 | |
Claudio Atzori | af15b1e48d | |
Claudio Atzori | eb45ba7af0 | |
Claudio Atzori | a929dc5fee | |
Miriam Baglioni | 5f9383b2d9 | |
Miriam Baglioni | b18bbca8af | |
dimitrispie | 55fa3b2a17 | |
Claudio Atzori | 80c5e0f637 | |
Claudio Atzori | c01d528ab2 | |
Claudio Atzori | e6d788d27a | |
Claudio Atzori | 930f118673 | |
Claudio Atzori | b2c3071e72 | |
Claudio Atzori | 10ec074f79 | |
Claudio Atzori | 7225fe9cbe | |
Miriam Baglioni | 869e129288 | |
Miriam Baglioni | 840465958b | |
Claudio Atzori | bdc8f993d0 | |
Miriam Baglioni | ec87149cb3 | |
Miriam Baglioni | b42e2c9df6 | |
Miriam Baglioni | 1329aa8479 | |
Miriam Baglioni | a0ee1a8640 | |
Claudio Atzori | 96062164f9 | |
Claudio Atzori | 35bb7c423f | |
Claudio Atzori | fd87571506 | |
Claudio Atzori | c527112e33 | |
Claudio Atzori | 65209359bc | |
Claudio Atzori | d72a64ded3 | |
Claudio Atzori | 3e8499ce47 | |
Claudio Atzori | 61aacb3271 | |
Claudio Atzori | dbb567251a | |
Claudio Atzori | c7e8ad853e | |
Claudio Atzori | 0849ebfd80 | |
Claudio Atzori | 281239249e | |
Claudio Atzori | 45fc5e12be | |
Claudio Atzori | 1c05aaaa2e | |
Claudio Atzori | 01d5ad6361 | |
Claudio Atzori | d872d1cdd9 | |
Claudio Atzori | ab0efecab4 | |
Claudio Atzori | 725c3c68d0 | |
Claudio Atzori | 300ae6221c | |
Claudio Atzori | 0ec2eaba35 | |
Claudio Atzori | a387807d43 | |
Claudio Atzori | 2abe2bc137 | |
Claudio Atzori | a07c876922 | |
Claudio Atzori | cbd48bc645 |
```diff
@@ -7,12 +7,12 @@ import java.sql.*;
 import java.util.function.Consumer;
 
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class DbClient implements Closeable {
 
-	private static final Log log = LogFactory.getLog(DbClient.class);
+	private static final Logger log = LoggerFactory.getLogger(DbClient.class);
 
 	private final Connection connection;
 
```
```diff
@@ -37,6 +37,8 @@ public class DbClient implements Closeable {
 		try (final Statement stmt = connection.createStatement()) {
 			stmt.setFetchSize(100);
 
+			log.info("running SQL:\n\n{}\n\n", sql);
+
 			try (final ResultSet rs = stmt.executeQuery(sql)) {
 				while (rs.next()) {
 					consumer.accept(rs);
```
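The two hunks above migrate `DbClient` from Apache commons-logging to slf4j and log the SQL statement before executing it. A minimal sketch of the slf4j idiom the diff adopts (class name and message are illustrative, not from the repository):

```java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingExample {

	private static final Logger log = LoggerFactory.getLogger(LoggingExample.class);

	void run(final String sql) {
		// the {} placeholder is substituted lazily, so the message string is
		// only assembled when the INFO level is actually enabled
		log.info("running SQL:\n\n{}\n\n", sql);
	}
}
```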
Deleted: `eu.dnetlib.dhp.common.api.InputStreamRequestBody`

```diff
@@ -1,53 +0,0 @@
-
-package eu.dnetlib.dhp.common.api;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-import okhttp3.MediaType;
-import okhttp3.RequestBody;
-import okhttp3.internal.Util;
-import okio.BufferedSink;
-import okio.Okio;
-import okio.Source;
-
-public class InputStreamRequestBody extends RequestBody {
-
-	private final InputStream inputStream;
-	private final MediaType mediaType;
-	private final long lenght;
-
-	public static RequestBody create(final MediaType mediaType, final InputStream inputStream, final long len) {
-
-		return new InputStreamRequestBody(inputStream, mediaType, len);
-	}
-
-	private InputStreamRequestBody(InputStream inputStream, MediaType mediaType, long len) {
-		this.inputStream = inputStream;
-		this.mediaType = mediaType;
-		this.lenght = len;
-	}
-
-	@Override
-	public MediaType contentType() {
-		return mediaType;
-	}
-
-	@Override
-	public long contentLength() {
-
-		return lenght;
-
-	}
-
-	@Override
-	public void writeTo(BufferedSink sink) throws IOException {
-		Source source = null;
-		try {
-			source = Okio.source(inputStream);
-			sink.writeAll(source);
-		} finally {
-			Util.closeQuietly(source);
-		}
-	}
-}
```
Deleted: `eu.dnetlib.dhp.common.api.MissingConceptDoiException`

```diff
@@ -1,8 +0,0 @@
-
-package eu.dnetlib.dhp.common.api;
-
-public class MissingConceptDoiException extends Throwable {
-	public MissingConceptDoiException(String message) {
-		super(message);
-	}
-}
```
Deleted: `eu.dnetlib.dhp.common.api.ZenodoAPIClient`

```diff
@@ -1,363 +0,0 @@
-
-package eu.dnetlib.dhp.common.api;
-
-import java.io.*;
-import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.http.HttpHeaders;
-import org.apache.http.entity.ContentType;
-import org.jetbrains.annotations.NotNull;
-
-import com.google.gson.Gson;
-
-import eu.dnetlib.dhp.common.api.zenodo.ZenodoModel;
-import eu.dnetlib.dhp.common.api.zenodo.ZenodoModelList;
-import okhttp3.*;
-
-public class ZenodoAPIClient implements Serializable {
-
-	String urlString;
-	String bucket;
-
-	String deposition_id;
-	String access_token;
-
-	public static final MediaType MEDIA_TYPE_JSON = MediaType.parse("application/json; charset=utf-8");
-
-	private static final MediaType MEDIA_TYPE_ZIP = MediaType.parse("application/zip");
-
-	public String getUrlString() {
-		return urlString;
-	}
-
-	public void setUrlString(String urlString) {
-		this.urlString = urlString;
-	}
-
-	public String getBucket() {
-		return bucket;
-	}
-
-	public void setBucket(String bucket) {
-		this.bucket = bucket;
-	}
-
-	public void setDeposition_id(String deposition_id) {
-		this.deposition_id = deposition_id;
-	}
-
-	public ZenodoAPIClient(String urlString, String access_token) {
-
-		this.urlString = urlString;
-		this.access_token = access_token;
-	}
-
-	/**
-	 * Brand new deposition in Zenodo. It sets the deposition_id and the bucket where to store the files to upload
-	 *
-	 * @return response code
-	 * @throws IOException
-	 */
-	public int newDeposition() throws IOException {
-		String json = "{}";
-
-		URL url = new URL(urlString);
-		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-		conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString());
-		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
-		conn.setRequestMethod("POST");
-		conn.setDoOutput(true);
-		try (OutputStream os = conn.getOutputStream()) {
-			byte[] input = json.getBytes("utf-8");
-			os.write(input, 0, input.length);
-		}
-
-		String body = getBody(conn);
-
-		int responseCode = conn.getResponseCode();
-		conn.disconnect();
-
-		if (!checkOKStatus(responseCode))
-			throw new IOException("Unexpected code " + responseCode + body);
-
-		ZenodoModel newSubmission = new Gson().fromJson(body, ZenodoModel.class);
-		this.bucket = newSubmission.getLinks().getBucket();
-		this.deposition_id = newSubmission.getId();
-
-		return responseCode;
-	}
-
-	/**
-	 * Upload files in Zenodo.
-	 *
-	 * @param is the inputStream for the file to upload
-	 * @param file_name the name of the file as it will appear on Zenodo
-	 * @return the response code
-	 */
-	public int uploadIS(InputStream is, String file_name) throws IOException {
-
-		URL url = new URL(bucket + "/" + file_name);
-		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-		conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, "application/zip");
-		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
-		conn.setDoOutput(true);
-		conn.setRequestMethod("PUT");
-
-		byte[] buf = new byte[8192];
-		int length;
-		try (OutputStream os = conn.getOutputStream()) {
-			while ((length = is.read(buf)) != -1) {
-				os.write(buf, 0, length);
-			}
-
-		}
-		int responseCode = conn.getResponseCode();
-		if (!checkOKStatus(responseCode)) {
-			throw new IOException("Unexpected code " + responseCode + getBody(conn));
-		}
-
-		return responseCode;
-	}
-
-	@NotNull
-	private String getBody(HttpURLConnection conn) throws IOException {
-		String body = "{}";
-		try (BufferedReader br = new BufferedReader(
-			new InputStreamReader(conn.getInputStream(), "utf-8"))) {
-			StringBuilder response = new StringBuilder();
-			String responseLine = null;
-			while ((responseLine = br.readLine()) != null) {
-				response.append(responseLine.trim());
-			}
-
-			body = response.toString();
-
-		}
-		return body;
-	}
-
-	/**
-	 * Associates metadata information to the current deposition
-	 *
-	 * @param metadata the metadata
-	 * @return response code
-	 * @throws IOException
-	 */
-	public int sendMretadata(String metadata) throws IOException {
-
-		URL url = new URL(urlString + "/" + deposition_id);
-		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-		conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString());
-		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
-		conn.setDoOutput(true);
-		conn.setRequestMethod("PUT");
-
-		try (OutputStream os = conn.getOutputStream()) {
-			byte[] input = metadata.getBytes("utf-8");
-			os.write(input, 0, input.length);
-
-		}
-
-		final int responseCode = conn.getResponseCode();
-		conn.disconnect();
-		if (!checkOKStatus(responseCode))
-			throw new IOException("Unexpected code " + responseCode + getBody(conn));
-
-		return responseCode;
-
-	}
-
-	private boolean checkOKStatus(int responseCode) {
-
-		if (HttpURLConnection.HTTP_OK != responseCode ||
-			HttpURLConnection.HTTP_CREATED != responseCode)
-			return true;
-		return false;
-	}
-
-	/**
-	 * To publish the current deposition. It works for both new deposition or new version of an old deposition
-	 *
-	 * @return response code
-	 * @throws IOException
-	 */
-	@Deprecated
-	public int publish() throws IOException {
-
-		String json = "{}";
-
-		OkHttpClient httpClient = new OkHttpClient.Builder().connectTimeout(600, TimeUnit.SECONDS).build();
-
-		RequestBody body = RequestBody.create(json, MEDIA_TYPE_JSON);
-
-		Request request = new Request.Builder()
-			.url(urlString + "/" + deposition_id + "/actions/publish")
-			.addHeader("Authorization", "Bearer " + access_token)
-			.post(body)
-			.build();
-
-		try (Response response = httpClient.newCall(request).execute()) {
-
-			if (!response.isSuccessful())
-				throw new IOException("Unexpected code " + response + response.body().string());
-
-			return response.code();
-
-		}
-	}
-
-	/**
-	 * To create a new version of an already published deposition. It sets the deposition_id and the bucket to be used
-	 * for the new version.
-	 *
-	 * @param concept_rec_id the concept record id of the deposition for which to create a new version. It is the last
-	 * part of the url for the DOI Zenodo suggests to use to cite all versions: DOI: 10.xxx/zenodo.656930
-	 * concept_rec_id = 656930
-	 * @return response code
-	 */
-	public int newVersion(String concept_rec_id) throws IOException, MissingConceptDoiException {
-		setDepositionId(concept_rec_id, 1);
-		String json = "{}";
-
-		URL url = new URL(urlString + "/" + deposition_id + "/actions/newversion");
-		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-
-		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
-		conn.setDoOutput(true);
-		conn.setRequestMethod("POST");
-
-		try (OutputStream os = conn.getOutputStream()) {
-			byte[] input = json.getBytes("utf-8");
-			os.write(input, 0, input.length);
-
-		}
-
-		String body = getBody(conn);
-
-		int responseCode = conn.getResponseCode();
-
-		conn.disconnect();
-		if (!checkOKStatus(responseCode))
-			throw new IOException("Unexpected code " + responseCode + body);
-
-		ZenodoModel zenodoModel = new Gson().fromJson(body, ZenodoModel.class);
-		String latest_draft = zenodoModel.getLinks().getLatest_draft();
-		deposition_id = latest_draft.substring(latest_draft.lastIndexOf("/") + 1);
-		bucket = getBucket(latest_draft);
-
-		return responseCode;
-
-	}
-
-	/**
-	 * To finish uploading a version or new deposition not published
-	 * It sets the deposition_id and the bucket to be used
-	 *
-	 *
-	 * @param deposition_id the deposition id of the not yet published upload
-	 * concept_rec_id = 656930
-	 * @return response code
-	 * @throws IOException
-	 * @throws MissingConceptDoiException
-	 */
-	public int uploadOpenDeposition(String deposition_id) throws IOException, MissingConceptDoiException {
-
-		this.deposition_id = deposition_id;
-
-		String json = "{}";
-
-		URL url = new URL(urlString + "/" + deposition_id);
-		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-
-		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
-		conn.setRequestMethod("POST");
-		conn.setDoOutput(true);
-		try (OutputStream os = conn.getOutputStream()) {
-			byte[] input = json.getBytes("utf-8");
-			os.write(input, 0, input.length);
-		}
-
-		String body = getBody(conn);
-
-		int responseCode = conn.getResponseCode();
-		conn.disconnect();
-
-		if (!checkOKStatus(responseCode))
-			throw new IOException("Unexpected code " + responseCode + body);
-
-		ZenodoModel zenodoModel = new Gson().fromJson(body, ZenodoModel.class);
-		bucket = zenodoModel.getLinks().getBucket();
-
-		return responseCode;
-
-	}
-
-	private void setDepositionId(String concept_rec_id, Integer page) throws IOException, MissingConceptDoiException {
-
-		ZenodoModelList zenodoModelList = new Gson()
-			.fromJson(getPrevDepositions(String.valueOf(page)), ZenodoModelList.class);
-
-		for (ZenodoModel zm : zenodoModelList) {
-			if (zm.getConceptrecid().equals(concept_rec_id)) {
-				deposition_id = zm.getId();
-				return;
-			}
-		}
-		if (zenodoModelList.size() == 0)
-			throw new MissingConceptDoiException(
-				"The concept record id specified was missing in the list of depositions");
-		setDepositionId(concept_rec_id, page + 1);
-
-	}
-
-	private String getPrevDepositions(String page) throws IOException {
-
-		HttpUrl.Builder urlBuilder = HttpUrl.parse(urlString).newBuilder();
-		urlBuilder.addQueryParameter("page", page);
-
-		URL url = new URL(urlBuilder.build().toString());
-		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-		conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString());
-		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
-		conn.setDoOutput(true);
-		conn.setRequestMethod("GET");
-
-		String body = getBody(conn);
-
-		int responseCode = conn.getResponseCode();
-
-		conn.disconnect();
-		if (!checkOKStatus(responseCode))
-			throw new IOException("Unexpected code " + responseCode + body);
-
-		return body;
-
-	}
-
-	private String getBucket(String inputUurl) throws IOException {
-
-		URL url = new URL(inputUurl);
-		HttpURLConnection conn = (HttpURLConnection) url.openConnection();
-		conn.setRequestProperty(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.toString());
-		conn.setRequestProperty(HttpHeaders.AUTHORIZATION, "Bearer " + access_token);
-		conn.setDoOutput(true);
-		conn.setRequestMethod("GET");
-
-		String body = getBody(conn);
-
-		int responseCode = conn.getResponseCode();
-
-		conn.disconnect();
-		if (!checkOKStatus(responseCode))
-			throw new IOException("Unexpected code " + responseCode + body);
-
-		ZenodoModel zenodoModel = new Gson().fromJson(body, ZenodoModel.class);
-
-		return zenodoModel.getLinks().getBucket();
-
-	}
-
-}
```
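The removed client implemented the Zenodo deposition workflow end to end. A hedged sketch of how it was typically driven, based only on the public methods visible in the deleted code (URL, token, and file name are placeholders; the sandbox URL appears in the deleted test at the bottom of this compare):

```java
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class ZenodoUploadExample {
	public static void main(String[] args) throws IOException {
		// hypothetical usage of the now-removed client
		ZenodoAPIClient client = new ZenodoAPIClient(
			"https://sandbox.zenodo.org/api/deposit/depositions", "<ACCESS_TOKEN>");
		client.newDeposition(); // sets deposition_id and the upload bucket
		try (InputStream is = new FileInputStream("dump.zip")) {
			client.uploadIS(is, "dump.zip"); // streams the file into the bucket
		}
		client.sendMretadata("{\"metadata\":{}}"); // (sic) method name as in the removed code
		client.publish(); // was already @Deprecated at removal time
	}
}
```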
Deleted: `eu.dnetlib.dhp.common.api.zenodo.Community`

```diff
@@ -1,14 +0,0 @@
-
-package eu.dnetlib.dhp.common.api.zenodo;
-
-public class Community {
-	private String identifier;
-
-	public String getIdentifier() {
-		return identifier;
-	}
-
-	public void setIdentifier(String identifier) {
-		this.identifier = identifier;
-	}
-}
```
Deleted: `eu.dnetlib.dhp.common.api.zenodo.Creator`

```diff
@@ -1,47 +0,0 @@
-
-package eu.dnetlib.dhp.common.api.zenodo;
-
-public class Creator {
-	private String affiliation;
-	private String name;
-	private String orcid;
-
-	public String getAffiliation() {
-		return affiliation;
-	}
-
-	public void setAffiliation(String affiliation) {
-		this.affiliation = affiliation;
-	}
-
-	public String getName() {
-		return name;
-	}
-
-	public void setName(String name) {
-		this.name = name;
-	}
-
-	public String getOrcid() {
-		return orcid;
-	}
-
-	public void setOrcid(String orcid) {
-		this.orcid = orcid;
-	}
-
-	public static Creator newInstance(String name, String affiliation, String orcid) {
-		Creator c = new Creator();
-		if (name != null) {
-			c.name = name;
-		}
-		if (affiliation != null) {
-			c.affiliation = affiliation;
-		}
-		if (orcid != null) {
-			c.orcid = orcid;
-		}
-
-		return c;
-	}
-}
```
Deleted: `eu.dnetlib.dhp.common.api.zenodo.File`

```diff
@@ -1,44 +0,0 @@
-
-package eu.dnetlib.dhp.common.api.zenodo;
-
-import java.io.Serializable;
-
-public class File implements Serializable {
-	private String checksum;
-	private String filename;
-	private long filesize;
-	private String id;
-
-	public String getChecksum() {
-		return checksum;
-	}
-
-	public void setChecksum(String checksum) {
-		this.checksum = checksum;
-	}
-
-	public String getFilename() {
-		return filename;
-	}
-
-	public void setFilename(String filename) {
-		this.filename = filename;
-	}
-
-	public long getFilesize() {
-		return filesize;
-	}
-
-	public void setFilesize(long filesize) {
-		this.filesize = filesize;
-	}
-
-	public String getId() {
-		return id;
-	}
-
-	public void setId(String id) {
-		this.id = id;
-	}
-
-}
```
Deleted: `eu.dnetlib.dhp.common.api.zenodo.Grant`

```diff
@@ -1,23 +0,0 @@
-
-package eu.dnetlib.dhp.common.api.zenodo;
-
-import java.io.Serializable;
-
-public class Grant implements Serializable {
-	private String id;
-
-	public String getId() {
-		return id;
-	}
-
-	public void setId(String id) {
-		this.id = id;
-	}
-
-	public static Grant newInstance(String id) {
-		Grant g = new Grant();
-		g.id = id;
-
-		return g;
-	}
-}
```
Deleted: `eu.dnetlib.dhp.common.api.zenodo.Links`

```diff
@@ -1,92 +0,0 @@
-
-package eu.dnetlib.dhp.common.api.zenodo;
-
-import java.io.Serializable;
-
-public class Links implements Serializable {
-
-	private String bucket;
-
-	private String discard;
-
-	private String edit;
-	private String files;
-	private String html;
-	private String latest_draft;
-	private String latest_draft_html;
-	private String publish;
-
-	private String self;
-
-	public String getBucket() {
-		return bucket;
-	}
-
-	public void setBucket(String bucket) {
-		this.bucket = bucket;
-	}
-
-	public String getDiscard() {
-		return discard;
-	}
-
-	public void setDiscard(String discard) {
-		this.discard = discard;
-	}
-
-	public String getEdit() {
-		return edit;
-	}
-
-	public void setEdit(String edit) {
-		this.edit = edit;
-	}
-
-	public String getFiles() {
-		return files;
-	}
-
-	public void setFiles(String files) {
-		this.files = files;
-	}
-
-	public String getHtml() {
-		return html;
-	}
-
-	public void setHtml(String html) {
-		this.html = html;
-	}
-
-	public String getLatest_draft() {
-		return latest_draft;
-	}
-
-	public void setLatest_draft(String latest_draft) {
-		this.latest_draft = latest_draft;
-	}
-
-	public String getLatest_draft_html() {
-		return latest_draft_html;
-	}
-
-	public void setLatest_draft_html(String latest_draft_html) {
-		this.latest_draft_html = latest_draft_html;
-	}
-
-	public String getPublish() {
-		return publish;
-	}
-
-	public void setPublish(String publish) {
-		this.publish = publish;
-	}
-
-	public String getSelf() {
-		return self;
-	}
-
-	public void setSelf(String self) {
-		this.self = self;
-	}
-}
```
Deleted: `eu.dnetlib.dhp.common.api.zenodo.Metadata`

```diff
@@ -1,153 +0,0 @@
-
-package eu.dnetlib.dhp.common.api.zenodo;
-
-import java.io.Serializable;
-import java.util.List;
-
-public class Metadata implements Serializable {
-
-	private String access_right;
-	private List<Community> communities;
-	private List<Creator> creators;
-	private String description;
-	private String doi;
-	private List<Grant> grants;
-	private List<String> keywords;
-	private String language;
-	private String license;
-	private PrereserveDoi prereserve_doi;
-	private String publication_date;
-	private List<String> references;
-	private List<RelatedIdentifier> related_identifiers;
-	private String title;
-	private String upload_type;
-	private String version;
-
-	public String getUpload_type() {
-		return upload_type;
-	}
-
-	public void setUpload_type(String upload_type) {
-		this.upload_type = upload_type;
-	}
-
-	public String getVersion() {
-		return version;
-	}
-
-	public void setVersion(String version) {
-		this.version = version;
-	}
-
-	public String getAccess_right() {
-		return access_right;
-	}
-
-	public void setAccess_right(String access_right) {
-		this.access_right = access_right;
-	}
-
-	public List<Community> getCommunities() {
-		return communities;
-	}
-
-	public void setCommunities(List<Community> communities) {
-		this.communities = communities;
-	}
-
-	public List<Creator> getCreators() {
-		return creators;
-	}
-
-	public void setCreators(List<Creator> creators) {
-		this.creators = creators;
-	}
-
-	public String getDescription() {
-		return description;
-	}
-
-	public void setDescription(String description) {
-		this.description = description;
-	}
-
-	public String getDoi() {
-		return doi;
-	}
-
-	public void setDoi(String doi) {
-		this.doi = doi;
-	}
-
-	public List<Grant> getGrants() {
-		return grants;
-	}
-
-	public void setGrants(List<Grant> grants) {
-		this.grants = grants;
-	}
-
-	public List<String> getKeywords() {
-		return keywords;
-	}
-
-	public void setKeywords(List<String> keywords) {
-		this.keywords = keywords;
-	}
-
-	public String getLanguage() {
-		return language;
-	}
-
-	public void setLanguage(String language) {
-		this.language = language;
-	}
-
-	public String getLicense() {
-		return license;
-	}
-
-	public void setLicense(String license) {
-		this.license = license;
-	}
-
-	public PrereserveDoi getPrereserve_doi() {
-		return prereserve_doi;
-	}
-
-	public void setPrereserve_doi(PrereserveDoi prereserve_doi) {
-		this.prereserve_doi = prereserve_doi;
-	}
-
-	public String getPublication_date() {
-		return publication_date;
-	}
-
-	public void setPublication_date(String publication_date) {
-		this.publication_date = publication_date;
-	}
-
-	public List<String> getReferences() {
-		return references;
-	}
-
-	public void setReferences(List<String> references) {
-		this.references = references;
-	}
-
-	public List<RelatedIdentifier> getRelated_identifiers() {
-		return related_identifiers;
-	}
-
-	public void setRelated_identifiers(List<RelatedIdentifier> related_identifiers) {
-		this.related_identifiers = related_identifiers;
-	}
-
-	public String getTitle() {
-		return title;
-	}
-
-	public void setTitle(String title) {
-		this.title = title;
-	}
-}
```
Deleted: `eu.dnetlib.dhp.common.api.zenodo.PrereserveDoi`

```diff
@@ -1,25 +0,0 @@
-
-package eu.dnetlib.dhp.common.api.zenodo;
-
-import java.io.Serializable;
-
-public class PrereserveDoi implements Serializable {
-	private String doi;
-	private String recid;
-
-	public String getDoi() {
-		return doi;
-	}
-
-	public void setDoi(String doi) {
-		this.doi = doi;
-	}
-
-	public String getRecid() {
-		return recid;
-	}
-
-	public void setRecid(String recid) {
-		this.recid = recid;
-	}
-}
```
Deleted: `eu.dnetlib.dhp.common.api.zenodo.RelatedIdentifier`

```diff
@@ -1,43 +0,0 @@
-
-package eu.dnetlib.dhp.common.api.zenodo;
-
-import java.io.Serializable;
-
-public class RelatedIdentifier implements Serializable {
-	private String identifier;
-	private String relation;
-	private String resource_type;
-	private String scheme;
-
-	public String getIdentifier() {
-		return identifier;
-	}
-
-	public void setIdentifier(String identifier) {
-		this.identifier = identifier;
-	}
-
-	public String getRelation() {
-		return relation;
-	}
-
-	public void setRelation(String relation) {
-		this.relation = relation;
-	}
-
-	public String getResource_type() {
-		return resource_type;
-	}
-
-	public void setResource_type(String resource_type) {
-		this.resource_type = resource_type;
-	}
-
-	public String getScheme() {
-		return scheme;
-	}
-
-	public void setScheme(String scheme) {
-		this.scheme = scheme;
-	}
-}
```
Deleted: `eu.dnetlib.dhp.common.api.zenodo.ZenodoModel`

```diff
@@ -1,118 +0,0 @@
-
-package eu.dnetlib.dhp.common.api.zenodo;
-
-import java.io.Serializable;
-import java.util.List;
-
-public class ZenodoModel implements Serializable {
-
-	private String conceptrecid;
-	private String created;
-
-	private List<File> files;
-	private String id;
-	private Links links;
-	private Metadata metadata;
-	private String modified;
-	private String owner;
-	private String record_id;
-	private String state;
-	private boolean submitted;
-	private String title;
-
-	public String getConceptrecid() {
-		return conceptrecid;
-	}
-
-	public void setConceptrecid(String conceptrecid) {
-		this.conceptrecid = conceptrecid;
-	}
-
-	public String getCreated() {
-		return created;
-	}
-
-	public void setCreated(String created) {
-		this.created = created;
-	}
-
-	public List<File> getFiles() {
-		return files;
-	}
-
-	public void setFiles(List<File> files) {
-		this.files = files;
-	}
-
-	public String getId() {
-		return id;
-	}
-
-	public void setId(String id) {
-		this.id = id;
-	}
-
-	public Links getLinks() {
-		return links;
-	}
-
-	public void setLinks(Links links) {
-		this.links = links;
-	}
-
-	public Metadata getMetadata() {
-		return metadata;
-	}
-
-	public void setMetadata(Metadata metadata) {
-		this.metadata = metadata;
-	}
-
-	public String getModified() {
-		return modified;
-	}
-
-	public void setModified(String modified) {
-		this.modified = modified;
-	}
-
-	public String getOwner() {
-		return owner;
-	}
-
-	public void setOwner(String owner) {
-		this.owner = owner;
-	}
-
-	public String getRecord_id() {
-		return record_id;
-	}
-
-	public void setRecord_id(String record_id) {
-		this.record_id = record_id;
-	}
-
-	public String getState() {
-		return state;
-	}
-
-	public void setState(String state) {
-		this.state = state;
-	}
-
-	public boolean isSubmitted() {
-		return submitted;
-	}
-
-	public void setSubmitted(boolean submitted) {
-		this.submitted = submitted;
-	}
-
-	public String getTitle() {
-		return title;
-	}
-
-	public void setTitle(String title) {
-		this.title = title;
-	}
-}
```
Deleted: `eu.dnetlib.dhp.common.api.zenodo.ZenodoModelList`

```diff
@@ -1,7 +0,0 @@
-
-package eu.dnetlib.dhp.common.api.zenodo;
-
-import java.util.ArrayList;
-
-public class ZenodoModelList extends ArrayList<ZenodoModel> {
-}
```
```diff
@@ -212,11 +212,11 @@ public class HttpConnector2 {
 					.format(
 						"Unexpected status code: %s errors: %s", urlConn.getResponseCode(),
 						MAPPER.writeValueAsString(report)));
-		} catch (MalformedURLException | UnknownHostException e) {
+		} catch (MalformedURLException e) {
 			log.error(e.getMessage(), e);
 			report.put(e.getClass().getName(), e.getMessage());
 			throw new CollectorException(e.getMessage(), e);
-		} catch (SocketTimeoutException | SocketException e) {
+		} catch (SocketTimeoutException | SocketException | UnknownHostException e) {
 			log.error(e.getMessage(), e);
 			report.put(e.getClass().getName(), e.getMessage());
 			backoffAndSleep(getClientParams().getRetryDelay() * retryNumber * 1000);
```
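This hunk moves `UnknownHostException` from the fail-fast branch (which wraps it in a `CollectorException`) to the retry branch, so a transient DNS resolution failure now backs off and retries instead of aborting the collection. A hedged sketch of the resulting pattern (method and variable names are illustrative, not from the repository):

```java
import java.net.MalformedURLException;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;

public class RetryExample {

	// permanent failures abort immediately; transient ones back off, then retry
	void fetchWithRetry(int retryNumber, long retryDelayMs) throws InterruptedException {
		try {
			attempt(); // stand-in for the real HTTP call
		} catch (MalformedURLException e) {
			throw new IllegalStateException(e); // cannot succeed on retry
		} catch (SocketTimeoutException | SocketException | UnknownHostException e) {
			Thread.sleep(retryDelayMs * retryNumber); // transient, e.g. flaky DNS
		}
	}

	void attempt() throws MalformedURLException, SocketTimeoutException,
		SocketException, UnknownHostException {
	}
}
```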
New file: `eu.dnetlib.dhp.schema.oaf.HashableStructuredProperty`

```diff
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2024.
+ * SPDX-FileCopyrightText: © 2023 Consiglio Nazionale delle Ricerche
+ * SPDX-License-Identifier: AGPL-3.0-or-later
+ */
+
+package eu.dnetlib.dhp.schema.oaf;
+
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+
+public class HashableStructuredProperty extends StructuredProperty {
+
+	private static final long serialVersionUID = 8371670185221126045L;
+
+	public static HashableStructuredProperty newInstance(String value, Qualifier qualifier, DataInfo dataInfo) {
+		if (value == null) {
+			return null;
+		}
+		final HashableStructuredProperty sp = new HashableStructuredProperty();
+		sp.setValue(value);
+		sp.setQualifier(qualifier);
+		sp.setDataInfo(dataInfo);
+		return sp;
+	}
+
+	public static HashableStructuredProperty newInstance(StructuredProperty sp) {
+		HashableStructuredProperty hsp = new HashableStructuredProperty();
+		hsp.setQualifier(sp.getQualifier());
+		hsp.setValue(sp.getValue());
+		hsp.setQualifier(sp.getQualifier());
+		return hsp;
+	}
+
+	public static StructuredProperty toStructuredProperty(HashableStructuredProperty hsp) {
+		StructuredProperty sp = new StructuredProperty();
+		sp.setQualifier(hsp.getQualifier());
+		sp.setValue(hsp.getValue());
+		sp.setQualifier(hsp.getQualifier());
+		return sp;
+	}
+
+	@Override
+	public int hashCode() {
+		return new HashCodeBuilder(11, 91)
+			.append(getQualifier().getClassid())
+			.append(getQualifier().getSchemeid())
+			.append(getValue())
+			.hashCode();
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (obj == null) {
+			return false;
+		}
+		if (obj == this) {
+			return true;
+		}
+		if (obj.getClass() != getClass()) {
+			return false;
+		}
+		final HashableStructuredProperty rhs = (HashableStructuredProperty) obj;
+		return new EqualsBuilder()
+			.append(getQualifier().getClassid(), rhs.getQualifier().getClassid())
+			.append(getQualifier().getSchemeid(), rhs.getQualifier().getSchemeid())
+			.append(getValue(), rhs.getValue())
+			.isEquals();
+	}
+}
```
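The new wrapper defines equality and hashing on the (classid, schemeid, value) triple rather than on object identity, so PIDs can be compared by value inside hash-based collections. A minimal sketch of the effect (the `Qualifier` setup is illustrative, assuming its usual `setClassid`/`setSchemeid` accessors):

```java
import java.util.HashSet;
import java.util.Set;

import eu.dnetlib.dhp.schema.oaf.HashableStructuredProperty;
import eu.dnetlib.dhp.schema.oaf.Qualifier;

public class HashablePidExample {
	public static void main(String[] args) {
		Qualifier doi = new Qualifier(); // illustrative setup
		doi.setClassid("doi");
		doi.setSchemeid("dnet:pid_types");

		Set<HashableStructuredProperty> pids = new HashSet<>();
		pids.add(HashableStructuredProperty.newInstance("10.1234/abc", doi, null));
		pids.add(HashableStructuredProperty.newInstance("10.1234/abc", doi, null));

		// equality is (classid, schemeid, value), so the duplicate collapses
		System.out.println(pids.size()); // 1
	}
}
```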
```diff
@@ -43,34 +43,4 @@ public class CleaningFunctions {
 		return !PidBlacklistProvider.getBlacklist(s.getQualifier().getClassid()).contains(pidValue);
 	}
 
-	/**
-	 * Utility method that normalises PID values on a per-type basis.
-	 * @param pid the PID whose value will be normalised.
-	 * @return the PID containing the normalised value.
-	 */
-	public static StructuredProperty normalizePidValue(StructuredProperty pid) {
-		pid
-			.setValue(
-				normalizePidValue(
-					pid.getQualifier().getClassid(),
-					pid.getValue()));
-
-		return pid;
-	}
-
-	public static String normalizePidValue(String pidType, String pidValue) {
-		String value = Optional
-			.ofNullable(pidValue)
-			.map(String::trim)
-			.orElseThrow(() -> new IllegalArgumentException("PID value cannot be empty"));
-
-		switch (pidType) {
-
-			// TODO add cleaning for more PID types as needed
-			case "doi":
-				return value.toLowerCase().replaceFirst(DOI_PREFIX_REGEX, DOI_PREFIX);
-		}
-		return value;
-	}
-
 }
```
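This hunk deletes the two `normalizePidValue` overloads from `CleaningFunctions`; the later hunks in this compare (`IdentifierFactory`, `MergeUtils`, `PidValueComparator`, `DHPUtils`) switch their call sites to `PidCleaner::normalizePidValue`. A hedged sketch of the relocated string overload, assuming `PidCleaner` keeps the behaviour removed here (its definition is not part of this compare):

```java
import eu.dnetlib.dhp.schema.oaf.utils.PidCleaner;

public class PidCleanerExample {
	public static void main(String[] args) {
		// per-type normalisation; for a DOI the removed implementation
		// lower-cased the value and reduced any resolver prefix to "10.xxx/yyy"
		String doi = PidCleaner.normalizePidValue("doi", "10.1000/ABC");
		System.out.println(doi); // expected "10.1000/abc" (illustrative)
	}
}
```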
```diff
@@ -6,18 +6,11 @@ import org.apache.commons.lang3.StringUtils;
 public class DoiCleaningRule {
 
 	public static String clean(final String doi) {
-		return doi
-			.toLowerCase()
-			.replaceAll("\\s", "")
-			.replaceAll("^doi:", "")
-			.replaceFirst(CleaningFunctions.DOI_PREFIX_REGEX, CleaningFunctions.DOI_PREFIX);
-	}
-
-	public static String normalizeDoi(final String input) {
-		if (input == null)
+		if (doi == null)
 			return null;
-		final String replaced = input
+		final String replaced = doi
 			.replaceAll("\\n|\\r|\\t|\\s", "")
+			.replaceAll("^doi:", "")
 			.toLowerCase()
 			.replaceFirst(CleaningFunctions.DOI_PREFIX_REGEX, CleaningFunctions.DOI_PREFIX);
 		if (StringUtils.isEmpty(replaced))
@@ -32,7 +25,6 @@ public class DoiCleaningRule {
 			return null;
 
 		return ret;
-
 	}
 
 }
```
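The rewrite folds the former `normalizeDoi` behaviour into `clean`: null guard, whitespace stripping, `doi:` prefix removal, lower-casing, and prefix normalisation now happen in one pass. Illustrative calls (the `DOI_PREFIX_REGEX`/`DOI_PREFIX` constants live in `CleaningFunctions` and are not shown in this compare):

```java
// note: the "^doi:" prefix is stripped before lower-casing, so only a
// lower-case "doi:" prefix is removed
String cleaned = DoiCleaningRule.clean("doi:10.1000/ABC\n"); // -> "10.1000/abc"
String none = DoiCleaningRule.clean(null);                   // -> null (new guard)
```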
```diff
@@ -119,7 +119,7 @@ public class GraphCleaningFunctions extends CleaningFunctions {
 						.getContext()
 						.stream()
 						.filter(c -> !StringUtils.startsWith(c.getId().toLowerCase(), contextId))
-						.collect(Collectors.toList()));
+						.collect(Collectors.toCollection(ArrayList::new)));
 			}
 			return (T) res;
 		} else {
```
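`Collectors.toList()` gives no guarantee about the mutability of the list it returns, while `Collectors.toCollection(ArrayList::new)` pins the result to a mutable `ArrayList`, which matters if the cleaned context list is modified again downstream. A minimal illustration (not from the repository):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class MutableCollectExample {
	public static void main(String[] args) {
		List<String> contexts = Stream.of("a", "b")
			.collect(Collectors.toCollection(ArrayList::new));
		contexts.add("c"); // guaranteed to work; toList() makes no such promise
		System.out.println(contexts); // [a, b, c]
	}
}
```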
```diff
@@ -563,12 +563,24 @@ public class GraphCleaningFunctions extends CleaningFunctions {
 				Optional
 					.ofNullable(i.getPid())
 					.ifPresent(pid -> {
-						final Set<StructuredProperty> pids = Sets.newHashSet(pid);
+						final Set<HashableStructuredProperty> pids = pid
+							.stream()
+							.map(HashableStructuredProperty::newInstance)
+							.collect(Collectors.toCollection(HashSet::new));
 						Optional
 							.ofNullable(i.getAlternateIdentifier())
 							.ifPresent(altId -> {
-								final Set<StructuredProperty> altIds = Sets.newHashSet(altId);
-								i.setAlternateIdentifier(Lists.newArrayList(Sets.difference(altIds, pids)));
+								final Set<HashableStructuredProperty> altIds = altId
+									.stream()
+									.map(HashableStructuredProperty::newInstance)
+									.collect(Collectors.toCollection(HashSet::new));
+								i
+									.setAlternateIdentifier(
+										Sets
+											.difference(altIds, pids)
+											.stream()
+											.map(HashableStructuredProperty::toStructuredProperty)
+											.collect(Collectors.toList()));
 							});
 					});
 
```
```diff
@@ -1003,4 +1015,41 @@ public class GraphCleaningFunctions extends CleaningFunctions {
 			.orElse(null);
 	}
 
+	/**
+	 * Implements bad and ugly things that we should get rid of ASAP.
+	 *
+	 * @param value
+	 * @return
+	 * @param <T>
+	 */
+	public static <T extends Oaf> T dedicatedUglyHacks(T value) {
+		if (value instanceof OafEntity) {
+			if (value instanceof Result) {
+				final Result r = (Result) value;
+
+				// Fix for AMS Acta
+				Optional
+					.ofNullable(r.getInstance())
+					.map(
+						instance -> instance
+							.stream()
+							.filter(
+								i -> Optional
+									.ofNullable(i.getHostedby())
+									.map(KeyValue::getKey)
+									.map(dsId -> dsId.equals("10|re3data_____::4cc76bed7ce2fb95fd8e7a2dfde16016"))
+									.orElse(false)))
+					.ifPresent(instance -> instance.forEach(i -> {
+						if (Optional
+							.ofNullable(i.getPid())
+							.map(pid -> pid.stream().noneMatch(p -> p.getValue().startsWith("10.6092/unibo/amsacta")))
+							.orElse(false)) {
+							i.setHostedby(UNKNOWN_REPOSITORY);
+						}
+					}));
+			}
+		}
+		return value;
+	}
+
 }
```
```diff
@@ -175,7 +175,7 @@ public class IdentifierFactory implements Serializable {
 		return entity
 			.getPid()
 			.stream()
-			.map(CleaningFunctions::normalizePidValue)
+			.map(PidCleaner::normalizePidValue)
 			.filter(CleaningFunctions::pidFilter)
 			.collect(
 				Collectors
```
```diff
@@ -207,7 +207,7 @@ public class IdentifierFactory implements Serializable {
 					// filter away PIDs provided by a DS that is not considered an authority for the
 					// given PID Type
 					.filter(p -> shouldFilterPidByCriteria(collectedFrom, p, mapHandles))
-					.map(CleaningFunctions::normalizePidValue)
+					.map(PidCleaner::normalizePidValue)
 					.filter(p -> isNotFromDelegatedAuthority(collectedFrom, p))
 					.filter(CleaningFunctions::pidFilter))
 			.orElse(Stream.empty());
```
```diff
@@ -96,7 +96,7 @@ public class MergeEntitiesComparator implements Comparator<Oaf> {
 		// id
 		if (res == 0) {
 			if (left instanceof OafEntity && right instanceof OafEntity) {
-				res = ((OafEntity) left).getId().compareTo(((OafEntity) right).getId());
+				res = ((OafEntity) right).getId().compareTo(((OafEntity) left).getId());
 			}
 		}
 
```
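Swapping the receiver and the argument of `compareTo` reverses the id tie-break from ascending to descending. A minimal illustration with made-up identifiers:

```java
String leftId = "idA";
String rightId = "idB";
System.out.println(leftId.compareTo(rightId)); // < 0: before the change, "idA" sorts first
System.out.println(rightId.compareTo(leftId)); // > 0: after the change, the order flips
```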
@@ -433,7 +433,10 @@ public class MergeUtils {

 		// merge datainfo for same context id
 		merge.setContext(mergeLists(merge.getContext(), enrich.getContext(), trust, Context::getId, (r, l) -> {
-			r.getDataInfo().addAll(l.getDataInfo());
+			ArrayList<DataInfo> di = new ArrayList<>();
+			di.addAll(r.getDataInfo());
+			di.addAll(l.getDataInfo());
+			r.setDataInfo(di);
 			return r;
 		}));
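
A note on the hunk above: appending to the list returned by getDataInfo() fails at runtime when that list is unmodifiable, which is presumably why the new version accumulates into a fresh ArrayList and sets it back. A minimal, self-contained sketch of the difference, using plain string lists as stand-ins for the Oaf DataInfo model:

import java.util.ArrayList;
import java.util.List;

public class DataInfoMergeDemo {
    public static void main(String[] args) {
        // Lists coming out of a deserialized record may be unmodifiable,
        // e.g. built with List.of(...) or Collections.unmodifiableList(...)
        List<String> r = List.of("trust:0.8"); // stand-in for r.getDataInfo()
        List<String> l = List.of("trust:0.9"); // stand-in for l.getDataInfo()

        // Old pattern: r.addAll(l) would throw UnsupportedOperationException here.

        // New pattern: accumulate into a fresh, mutable list and set it back.
        List<String> di = new ArrayList<>(r);
        di.addAll(l);
        System.out.println(di); // [trust:0.8, trust:0.9]
    }
}
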
@@ -972,7 +975,7 @@ public class MergeUtils {
 	private static String extractKeyFromPid(final StructuredProperty pid) {
 		if (pid == null)
 			return null;
-		final StructuredProperty normalizedPid = CleaningFunctions.normalizePidValue(pid);
+		final StructuredProperty normalizedPid = PidCleaner.normalizePidValue(pid);

 		return String.format("%s::%s", normalizedPid.getQualifier().getClassid(), normalizedPid.getValue());
 	}

@@ -18,8 +18,8 @@ public class PidValueComparator implements Comparator<StructuredProperty> {
 		if (right == null)
 			return -1;

-		StructuredProperty l = CleaningFunctions.normalizePidValue(left);
-		StructuredProperty r = CleaningFunctions.normalizePidValue(right);
+		StructuredProperty l = PidCleaner.normalizePidValue(left);
+		StructuredProperty r = PidCleaner.normalizePidValue(right);

 		return Optional
 			.ofNullable(l.getValue())

@@ -28,6 +28,7 @@ import com.jayway.jsonpath.JsonPath;

 import eu.dnetlib.dhp.schema.mdstore.MDStoreWithInfo;
 import eu.dnetlib.dhp.schema.oaf.utils.CleaningFunctions;
+import eu.dnetlib.dhp.schema.oaf.utils.PidCleaner;
 import net.minidev.json.JSONArray;
 import scala.collection.JavaConverters;
 import scala.collection.Seq;

@@ -104,7 +105,7 @@ public class DHPUtils {

 	public static String generateUnresolvedIdentifier(final String pid, final String pidType) {

-		final String cleanedPid = CleaningFunctions.normalizePidValue(pidType, pid);
+		final String cleanedPid = PidCleaner.normalizePidValue(pidType, pid);

 		return String.format("unresolved::%s::%s", cleanedPid, pidType.toLowerCase().trim());
 	}

@@ -1,109 +0,0 @@
-
-package eu.dnetlib.dhp.common.api;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-
-import org.apache.commons.io.IOUtils;
-import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.Disabled;
-import org.junit.jupiter.api.Test;
-
-@Disabled
-class ZenodoAPIClientTest {
-
-	private final String URL_STRING = "https://sandbox.zenodo.org/api/deposit/depositions";
-	private final String ACCESS_TOKEN = "";
-
-	private final String CONCEPT_REC_ID = "657113";
-
-	private final String depositionId = "674915";
-
-	@Test
-	void testUploadOldDeposition() throws IOException, MissingConceptDoiException {
-		ZenodoAPIClient client = new ZenodoAPIClient(URL_STRING,
-			ACCESS_TOKEN);
-		Assertions.assertEquals(200, client.uploadOpenDeposition(depositionId));
-
-		File file = new File(getClass()
-			.getResource("/eu/dnetlib/dhp/common/api/COVID-19.json.gz")
-			.getPath());
-
-		InputStream is = new FileInputStream(file);
-
-		Assertions.assertEquals(200, client.uploadIS(is, "COVID-19.json.gz"));
-
-		String metadata = IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/common/api/metadata.json"));
-
-		Assertions.assertEquals(200, client.sendMretadata(metadata));
-
-		Assertions.assertEquals(202, client.publish());
-	}
-
-	@Test
-	void testNewDeposition() throws IOException {
-
-		ZenodoAPIClient client = new ZenodoAPIClient(URL_STRING,
-			ACCESS_TOKEN);
-		Assertions.assertEquals(201, client.newDeposition());
-
-		File file = new File(getClass()
-			.getResource("/eu/dnetlib/dhp/common/api/COVID-19.json.gz")
-			.getPath());
-
-		InputStream is = new FileInputStream(file);
-
-		Assertions.assertEquals(200, client.uploadIS(is, "COVID-19.json.gz"));
-
-		String metadata = IOUtils.toString(getClass().getResourceAsStream("/eu/dnetlib/dhp/common/api/metadata.json"));
-
-		Assertions.assertEquals(200, client.sendMretadata(metadata));
-
-		Assertions.assertEquals(202, client.publish());
-	}
-
-	@Test
-	void testNewVersionNewName() throws IOException, MissingConceptDoiException {
-
-		ZenodoAPIClient client = new ZenodoAPIClient(URL_STRING,
-			ACCESS_TOKEN);
-
-		Assertions.assertEquals(201, client.newVersion(CONCEPT_REC_ID));
-
-		File file = new File(getClass()
-			.getResource("/eu/dnetlib/dhp/common/api/newVersion")
-			.getPath());
-
-		InputStream is = new FileInputStream(file);
-
-		Assertions.assertEquals(200, client.uploadIS(is, "newVersion_deposition"));
-
-		Assertions.assertEquals(202, client.publish());
-	}
-
-	@Test
-	void testNewVersionOldName() throws IOException, MissingConceptDoiException {
-
-		ZenodoAPIClient client = new ZenodoAPIClient(URL_STRING,
-			ACCESS_TOKEN);
-
-		Assertions.assertEquals(201, client.newVersion(CONCEPT_REC_ID));
-
-		File file = new File(getClass()
-			.getResource("/eu/dnetlib/dhp/common/api/newVersion2")
-			.getPath());
-
-		InputStream is = new FileInputStream(file);
-
-		Assertions.assertEquals(200, client.uploadIS(is, "newVersion_deposition"));
-
-		Assertions.assertEquals(202, client.publish());
-	}
-
-}

@@ -29,7 +29,7 @@ class IdentifierFactoryTest {
 			"publication_doi2.json", "50|doi_________::79dbc7a2a56dc1532659f9038843256e", true);

 		verifyIdentifier(
-			"publication_doi3.json", "50|pmc_________::94e4cb08c93f8733b48e2445d04002ac", true);
+			"publication_doi3.json", "50|pmc_________::e2a339e0e11bfbf55462e14a07f1b304", true);

 		verifyIdentifier(
 			"publication_doi4.json", "50|od______2852::38861c44e6052a8d49f59a4c39ba5e66", true);

@@ -41,7 +41,7 @@ class IdentifierFactoryTest {
 			"publication_pmc1.json", "50|DansKnawCris::0829b5191605bdbea36d6502b8c1ce1f", true);

 		verifyIdentifier(
-			"publication_pmc2.json", "50|pmc_________::94e4cb08c93f8733b48e2445d04002ac", true);
+			"publication_pmc2.json", "50|pmc_________::e2a339e0e11bfbf55462e14a07f1b304", true);

 		verifyIdentifier(
 			"publication_openapc.json", "50|doi_________::79dbc7a2a56dc1532659f9038843256e", true);
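
For context on the expected-identifier changes in these two test hunks: the OpenAIRE id appears to be built from a pid-type prefix plus an MD5 of the normalized pid value, so normalizing the raw PMC number 21459329 to PMC21459329 changes the hash, and with it the expected id. A stand-alone sketch of that md5-based construction (the real helper lives in IdentifierFactory; this version is only illustrative):

import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

public class PidIdDemo {
    // Illustrative MD5 helper; the production code uses IdentifierFactory.md5(...)
    static String md5(String s) throws Exception {
        MessageDigest md = MessageDigest.getInstance("MD5");
        byte[] digest = md.digest(s.getBytes(StandardCharsets.UTF_8));
        return String.format("%032x", new BigInteger(1, digest));
    }

    public static void main(String[] args) throws Exception {
        // the pid value now carries the PMC prefix, so the hash (and the id) changes
        System.out.println("50|pmc_________::" + md5("PMC21459329"));
        System.out.println("50|pmc_________::" + md5("21459329"));
    }
}
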
@@ -177,7 +177,7 @@ class OafMapperUtilsTest {
 		assertTrue(cfId(d1.getCollectedfrom()).contains(ModelConstants.CROSSREF_ID));

 		assertEquals(
-			ModelConstants.DATASET_RESULTTYPE_CLASSID,
+			ModelConstants.PUBLICATION_RESULTTYPE_CLASSID,
 			((Result) MergeUtils
 				.merge(p2, d1))
 					.getResulttype()

@@ -29,7 +29,7 @@
 		},
 		{
 			"qualifier": {"classid": "pmc"},
-			"value": "21459329"
+			"value": "PMC21459329"
 		}
 	]
 }

@@ -13,7 +13,7 @@
 		},
 		{
 			"qualifier":{"classid":"pmc"},
-			"value":"21459329"
+			"value":"PMC21459329"
 		}
 	]
 }

@@ -48,7 +48,7 @@ public class TreeNodeDef implements Serializable {
 	// function for the evaluation of the node
 	public TreeNodeStats evaluate(Row doc1, Row doc2, Config conf) {

-		TreeNodeStats stats = new TreeNodeStats(ignoreUndefined);
+		TreeNodeStats stats = new TreeNodeStats();

 		// for each field in the node, it computes the
 		for (FieldConf fieldConf : fields) {

@@ -9,11 +9,8 @@ public class TreeNodeStats implements Serializable {

 	private Map<String, FieldStats> results; // this is an accumulator for the results of the node

-	private final boolean ignoreUndefined;
-
-	public TreeNodeStats(boolean ignoreUndefined) {
+	public TreeNodeStats() {
 		this.results = new HashMap<>();
-		this.ignoreUndefined = ignoreUndefined;
 	}

 	public Map<String, FieldStats> getResults() {

@@ -25,10 +22,7 @@ public class TreeNodeStats implements Serializable {
 	}

 	public int fieldsCount() {
-		if (ignoreUndefined)
-			return this.results.size();
-		else
-			return this.results.size() - undefinedCount(); // do not count undefined
+		return this.results.size();
 	}

 	public int undefinedCount() {

@@ -84,22 +78,11 @@ public class TreeNodeStats implements Serializable {
 		double min = 100.0; // random high value
 		for (FieldStats fs : this.results.values()) {
 			if (fs.getResult() < min) {
-				if (fs.getResult() == -1) {
-					if (fs.isCountIfUndefined()) {
-						min = 0.0;
-					} else {
-						min = -1;
-					}
-				} else {
+				if (fs.getResult() >= 0.0 || (fs.getResult() == -1 && fs.isCountIfUndefined()))
 					min = fs.getResult();
-				}
 			}
 		}
-		if (ignoreUndefined) {
-			return min == -1.0 ? 0.0 : min;
-		} else {
-			return min;
-		}
+		return min;
 	}

 	// if at least one is true, return 1.0

@@ -108,11 +91,7 @@ public class TreeNodeStats implements Serializable {
 			if (fieldStats.getResult() >= fieldStats.getThreshold())
 				return 1.0;
 		}
-		if (!ignoreUndefined && undefinedCount() > 0) {
-			return -1.0;
-		} else {
-			return 0.0;
-		}
+		return 0.0;
 	}

 	// if at least one is false, return 0.0

|
||||||
|
|
||||||
if (fieldStats.getResult() == -1) {
|
if (fieldStats.getResult() == -1) {
|
||||||
if (fieldStats.isCountIfUndefined())
|
if (fieldStats.isCountIfUndefined())
|
||||||
return ignoreUndefined ? 0.0 : -1.0;
|
return 0.0;
|
||||||
} else {
|
} else {
|
||||||
if (fieldStats.getResult() < fieldStats.getThreshold())
|
if (fieldStats.getResult() < fieldStats.getThreshold())
|
||||||
return 0.0;
|
return 0.0;
|
||||||
|
|
|
@@ -44,10 +44,12 @@ public class TreeProcessor {
 				TreeNodeStats stats = currentNode.evaluate(doc1, doc2, config);
 				treeStats.addNodeStats(nextNodeName, stats);

-				double finalScore = stats.getFinalScore(currentNode.getAggregation());
-				if (finalScore == -1.0)
+				// if ignoreUndefined=false the miss is considered as undefined
+				if (!currentNode.isIgnoreUndefined() && stats.undefinedCount() > 0) {
 					nextNodeName = currentNode.getUndefined();
-				else if (finalScore >= currentNode.getThreshold()) {
+				}
+				// if ignoreUndefined=true the miss is ignored and the score computed anyway
+				else if (stats.getFinalScore(currentNode.getAggregation()) >= currentNode.getThreshold()) {
 					nextNodeName = currentNode.getPositive();
 				} else {
 					nextNodeName = currentNode.getNegative();
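
The TreeProcessor hunk above moves the ignoreUndefined decision out of TreeNodeStats and into the tree walk itself. Reduced to its core, the transition rule now reads as follows (a sketch with plain parameters standing in for the TreeNodeDef accessors):

public class NodeTransitionSketch {
    // An undefined comparison only diverts to the "undefined" branch when the node
    // does NOT ignore undefined fields; otherwise the aggregated score alone decides
    // between the positive and negative branch.
    static String nextNode(boolean ignoreUndefined, int undefinedCount, double score,
        double threshold, String undefined, String positive, String negative) {
        if (!ignoreUndefined && undefinedCount > 0) {
            return undefined;
        }
        return score >= threshold ? positive : negative;
    }
}
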
@@ -10,7 +10,6 @@ import java.util.List;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.BZip2Codec;
-import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaPairRDD;

@@ -29,6 +28,7 @@ import eu.dnetlib.dhp.schema.action.AtomicAction;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.*;
 import eu.dnetlib.dhp.schema.oaf.utils.CleaningFunctions;
+import eu.dnetlib.dhp.schema.oaf.utils.DoiCleaningRule;
 import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
 import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
 import scala.Tuple2;

@@ -44,6 +44,10 @@ public class PrepareAffiliationRelations implements Serializable {
 	public static final String BIP_AFFILIATIONS_CLASSID = "result:organization:openaireinference";
 	public static final String BIP_AFFILIATIONS_CLASSNAME = "Affiliation relation inferred by OpenAIRE";
 	public static final String BIP_INFERENCE_PROVENANCE = "openaire:affiliation";
+	public static final String OPENAIRE_DATASOURCE_ID = "10|infrastruct_::f66f1bd369679b5b077dcdf006089556";
+	public static final String OPENAIRE_DATASOURCE_NAME = "OpenAIRE";
+	public static final String DOI_URL_PREFIX = "https://doi.org/";
+	public static final int DOI_URL_PREFIX_LENGTH = 16;

 	public static <I extends Result> void main(String[] args) throws Exception {

@@ -74,6 +78,9 @@ public class PrepareAffiliationRelations implements Serializable {
 		final String webcrawlInputPath = parser.get("webCrawlInputPath");
 		log.info("webcrawlInputPath: {}", webcrawlInputPath);

+		final String publisherInputPath = parser.get("publisherInputPath");
+		log.info("publisherInputPath: {}", publisherInputPath);
+
 		final String outputPath = parser.get("outputPath");
 		log.info("outputPath: {}", outputPath);

@@ -84,43 +91,74 @@ public class PrepareAffiliationRelations implements Serializable {
 			isSparkSessionManaged,
 			spark -> {
 				Constants.removeOutputDir(spark, outputPath);
-
-				List<KeyValue> collectedFromCrossref = OafMapperUtils
-					.listKeyValues(ModelConstants.CROSSREF_ID, "Crossref");
-				JavaPairRDD<Text, Text> crossrefRelations = prepareAffiliationRelations(
-					spark, crossrefInputPath, collectedFromCrossref);
-
-				List<KeyValue> collectedFromPubmed = OafMapperUtils
-					.listKeyValues(ModelConstants.PUBMED_CENTRAL_ID, "Pubmed");
-				JavaPairRDD<Text, Text> pubmedRelations = prepareAffiliationRelations(
-					spark, pubmedInputPath, collectedFromPubmed);
-
-				List<KeyValue> collectedFromOpenAPC = OafMapperUtils
-					.listKeyValues(ModelConstants.OPEN_APC_ID, "OpenAPC");
-				JavaPairRDD<Text, Text> openAPCRelations = prepareAffiliationRelations(
-					spark, openapcInputPath, collectedFromOpenAPC);
-
-				List<KeyValue> collectedFromDatacite = OafMapperUtils
-					.listKeyValues(ModelConstants.DATACITE_ID, "Datacite");
-				JavaPairRDD<Text, Text> dataciteRelations = prepareAffiliationRelations(
-					spark, dataciteInputPath, collectedFromDatacite);
-
-				List<KeyValue> collectedFromWebCrawl = OafMapperUtils
-					.listKeyValues(Constants.WEB_CRAWL_ID, Constants.WEB_CRAWL_NAME);
-				JavaPairRDD<Text, Text> webCrawlRelations = prepareAffiliationRelations(
-					spark, webcrawlInputPath, collectedFromWebCrawl);
-
-				crossrefRelations
-					.union(pubmedRelations)
-					.union(openAPCRelations)
-					.union(dataciteRelations)
-					.union(webCrawlRelations)
-					.saveAsHadoopFile(
-						outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, BZip2Codec.class);
-
+				createActionSet(
+					spark, crossrefInputPath, pubmedInputPath, openapcInputPath, dataciteInputPath, webcrawlInputPath,
+					publisherInputPath, outputPath);
 			});
 	}

+	private static void createActionSet(SparkSession spark, String crossrefInputPath, String pubmedInputPath,
+		String openapcInputPath, String dataciteInputPath, String webcrawlInputPath, String publisherlInputPath,
+		String outputPath) {
+		List<KeyValue> collectedfromOpenAIRE = OafMapperUtils
+			.listKeyValues(OPENAIRE_DATASOURCE_ID, OPENAIRE_DATASOURCE_NAME);
+
+		JavaPairRDD<Text, Text> crossrefRelations = prepareAffiliationRelationsNewModel(
+			spark, crossrefInputPath, collectedfromOpenAIRE);
+
+		JavaPairRDD<Text, Text> pubmedRelations = prepareAffiliationRelations(
+			spark, pubmedInputPath, collectedfromOpenAIRE);
+
+		JavaPairRDD<Text, Text> openAPCRelations = prepareAffiliationRelationsNewModel(
+			spark, openapcInputPath, collectedfromOpenAIRE);
+
+		JavaPairRDD<Text, Text> dataciteRelations = prepareAffiliationRelations(
+			spark, dataciteInputPath, collectedfromOpenAIRE);
+
+		JavaPairRDD<Text, Text> webCrawlRelations = prepareAffiliationRelations(
+			spark, webcrawlInputPath, collectedfromOpenAIRE);
+
+		JavaPairRDD<Text, Text> publisherRelations = prepareAffiliationRelationFromPublisher(
+			spark, publisherlInputPath, collectedfromOpenAIRE);
+
+		crossrefRelations
+			.union(pubmedRelations)
+			.union(openAPCRelations)
+			.union(dataciteRelations)
+			.union(webCrawlRelations)
+			.union(publisherRelations)
+			.saveAsHadoopFile(
+				outputPath, Text.class, Text.class, SequenceFileOutputFormat.class, BZip2Codec.class);
+	}
+
+	private static JavaPairRDD<Text, Text> prepareAffiliationRelationFromPublisherNewModel(SparkSession spark,
+		String inputPath,
+		List<KeyValue> collectedfrom) {
+
+		Dataset<Row> df = spark
+			.read()
+			.schema(
+				"`DOI` STRING, `Organizations` ARRAY<STRUCT<`PID`:STRING, `Value`:STRING,`Confidence`:DOUBLE, `Status`:STRING>>")
+			.json(inputPath)
+			.where("DOI is not null");
+
+		return getTextTextJavaPairRDD(collectedfrom, df.selectExpr("DOI", "Organizations as Matchings"));
+
+	}
+
+	private static JavaPairRDD<Text, Text> prepareAffiliationRelationFromPublisher(SparkSession spark, String inputPath,
+		List<KeyValue> collectedfrom) {
+
+		Dataset<Row> df = spark
+			.read()
+			.schema("`DOI` STRING, `Organizations` ARRAY<STRUCT<`RORid`:STRING,`Confidence`:DOUBLE>>")
+			.json(inputPath)
+			.where("DOI is not null");
+
+		return getTextTextJavaPairRDD(collectedfrom, df.selectExpr("DOI", "Organizations as Matchings"));
+
+	}
+
 	private static <I extends Result> JavaPairRDD<Text, Text> prepareAffiliationRelations(SparkSession spark,
 		String inputPath,
 		List<KeyValue> collectedfrom) {

@@ -132,6 +170,24 @@ public class PrepareAffiliationRelations implements Serializable {
 			.json(inputPath)
 			.where("DOI is not null");

+		return getTextTextJavaPairRDD(collectedfrom, df);
+	}
+
+	private static <I extends Result> JavaPairRDD<Text, Text> prepareAffiliationRelationsNewModel(SparkSession spark,
+		String inputPath,
+		List<KeyValue> collectedfrom) {
+		// load and parse affiliation relations from HDFS
+		Dataset<Row> df = spark
+			.read()
+			.schema(
+				"`DOI` STRING, `Matchings` ARRAY<STRUCT<`PID`:STRING, `Value`:STRING,`Confidence`:DOUBLE, `Status`:STRING>>")
+			.json(inputPath)
+			.where("DOI is not null");
+
+		return getTextTextJavaPairRDDNew(collectedfrom, df);
+	}
+
+	private static JavaPairRDD<Text, Text> getTextTextJavaPairRDD(List<KeyValue> collectedfrom, Dataset<Row> df) {
 		// unroll nested arrays
 		df = df
 			.withColumn("matching", functions.explode(new Column("Matchings")))

|
||||||
|
|
||||||
// DOI to OpenAIRE id
|
// DOI to OpenAIRE id
|
||||||
final String paperId = ID_PREFIX
|
final String paperId = ID_PREFIX
|
||||||
+ IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", row.getAs("doi")));
|
+ IdentifierFactory.md5(DoiCleaningRule.clean(removePrefix(row.getAs("doi"))));
|
||||||
|
|
||||||
// ROR id to OpenAIRE id
|
// ROR id to OpenAIRE id
|
||||||
final String affId = GenerateRorActionSetJob.calculateOpenaireId(row.getAs("rorid"));
|
final String affId = GenerateRorActionSetJob.calculateOpenaireId(row.getAs("rorid"));
|
||||||
|
@ -179,6 +235,69 @@ public class PrepareAffiliationRelations implements Serializable {
|
||||||
new Text(OBJECT_MAPPER.writeValueAsString(aa))));
|
new Text(OBJECT_MAPPER.writeValueAsString(aa))));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private static JavaPairRDD<Text, Text> getTextTextJavaPairRDDNew(List<KeyValue> collectedfrom, Dataset<Row> df) {
|
||||||
|
// unroll nested arrays
|
||||||
|
df = df
|
||||||
|
.withColumn("matching", functions.explode(new Column("Matchings")))
|
||||||
|
.select(
|
||||||
|
new Column("DOI").as("doi"),
|
||||||
|
new Column("matching.PID").as("pidtype"),
|
||||||
|
new Column("matching.Value").as("pidvalue"),
|
||||||
|
new Column("matching.Confidence").as("confidence"),
|
||||||
|
new Column("matching.Status").as("status"))
|
||||||
|
.where("status = 'active'");
|
||||||
|
|
||||||
|
// prepare action sets for affiliation relations
|
||||||
|
return df
|
||||||
|
.toJavaRDD()
|
||||||
|
.flatMap((FlatMapFunction<Row, Relation>) row -> {
|
||||||
|
|
||||||
|
// DOI to OpenAIRE id
|
||||||
|
final String paperId = ID_PREFIX
|
||||||
|
+ IdentifierFactory.md5(DoiCleaningRule.clean(removePrefix(row.getAs("doi"))));
|
||||||
|
|
||||||
|
// Organization to OpenAIRE identifier
|
||||||
|
String affId = null;
|
||||||
|
if (row.getAs("pidtype").equals("ROR"))
|
||||||
|
// ROR id to OpenIARE id
|
||||||
|
affId = GenerateRorActionSetJob.calculateOpenaireId(row.getAs("pidvalue"));
|
||||||
|
else
|
||||||
|
// getting the OpenOrgs identifier for the organization
|
||||||
|
affId = row.getAs("pidvalue");
|
||||||
|
|
||||||
|
Qualifier qualifier = OafMapperUtils
|
||||||
|
.qualifier(
|
||||||
|
BIP_AFFILIATIONS_CLASSID,
|
||||||
|
BIP_AFFILIATIONS_CLASSNAME,
|
||||||
|
ModelConstants.DNET_PROVENANCE_ACTIONS,
|
||||||
|
ModelConstants.DNET_PROVENANCE_ACTIONS);
|
||||||
|
|
||||||
|
// format data info; setting `confidence` into relation's `trust`
|
||||||
|
DataInfo dataInfo = OafMapperUtils
|
||||||
|
.dataInfo(
|
||||||
|
false,
|
||||||
|
BIP_INFERENCE_PROVENANCE,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
qualifier,
|
||||||
|
Double.toString(row.getAs("confidence")));
|
||||||
|
|
||||||
|
// return bi-directional relations
|
||||||
|
return getAffiliationRelationPair(paperId, affId, collectedfrom, dataInfo).iterator();
|
||||||
|
|
||||||
|
})
|
||||||
|
.map(p -> new AtomicAction(Relation.class, p))
|
||||||
|
.mapToPair(
|
||||||
|
aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
|
||||||
|
new Text(OBJECT_MAPPER.writeValueAsString(aa))));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static String removePrefix(String doi) {
|
||||||
|
if (doi.startsWith(DOI_URL_PREFIX))
|
||||||
|
return doi.substring(DOI_URL_PREFIX_LENGTH);
|
||||||
|
return doi;
|
||||||
|
}
|
||||||
|
|
||||||
private static List<Relation> getAffiliationRelationPair(String paperId, String affId, List<KeyValue> collectedfrom,
|
private static List<Relation> getAffiliationRelationPair(String paperId, String affId, List<KeyValue> collectedfrom,
|
||||||
DataInfo dataInfo) {
|
DataInfo dataInfo) {
|
||||||
return Arrays
|
return Arrays
|
||||||
|
|
|
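
One detail of the additions above worth spelling out: DOI_URL_PREFIX_LENGTH is 16 because "https://doi.org/" is exactly 16 characters, so removePrefix() reduces a DOI URL to the bare DOI before cleaning and hashing. A runnable check (the constants are copied from the hunk; the class wrapper is just for illustration):

public class DoiPrefixDemo {
    static final String DOI_URL_PREFIX = "https://doi.org/";
    static final int DOI_URL_PREFIX_LENGTH = 16; // == DOI_URL_PREFIX.length()

    static String removePrefix(String doi) {
        if (doi.startsWith(DOI_URL_PREFIX))
            return doi.substring(DOI_URL_PREFIX_LENGTH);
        return doi;
    }

    public static void main(String[] args) {
        System.out.println(DOI_URL_PREFIX.length());                        // 16
        System.out.println(removePrefix("https://doi.org/10.1000/xyz123")); // 10.1000/xyz123
        System.out.println(removePrefix("10.1000/xyz123"));                 // already bare, unchanged
    }
}
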
@@ -49,6 +49,9 @@ public class ReadCOCI implements Serializable {
 		final String workingPath = parser.get("inputPath");
 		log.info("workingPath {}", workingPath);

+		final String backupPath = parser.get("backupPath");
+		log.info("backupPath {}", backupPath);
+
 		SparkConf sconf = new SparkConf();

 		Configuration conf = new Configuration();

|
||||||
workingPath,
|
workingPath,
|
||||||
fileSystem,
|
fileSystem,
|
||||||
outputPath,
|
outputPath,
|
||||||
|
backupPath,
|
||||||
delimiter);
|
delimiter);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
private static void doRead(SparkSession spark, String workingPath, FileSystem fileSystem,
|
private static void doRead(SparkSession spark, String workingPath, FileSystem fileSystem,
|
||||||
String outputPath,
|
String outputPath,
|
||||||
|
String backupPath,
|
||||||
String delimiter) throws IOException {
|
String delimiter) throws IOException {
|
||||||
RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem
|
RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem
|
||||||
.listFiles(
|
.listFiles(
|
||||||
|
@@ -107,7 +112,8 @@ public class ReadCOCI implements Serializable {
 				.mode(SaveMode.Append)
 				.option("compression", "gzip")
 				.json(outputPath);
-			fileSystem.rename(fileStatus.getPath(), new Path("/tmp/miriam/OC/DONE"));
+
+			fileSystem.rename(fileStatus.getPath(), new Path(backupPath));
 		}

 	}
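
The ReadCOCI hunks above replace a hard-coded /tmp/miriam/OC/DONE destination with a configurable backupPath, so processed inputs are archived via an HDFS rename. A minimal sketch of that archive-after-processing pattern with the Hadoop FileSystem API (paths here are illustrative, not the job's actual configuration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class BackupAfterRead {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path processed = new Path("/data/oc/input/part-000.csv"); // hypothetical input
        Path backupDir = new Path("/data/oc/backup");             // hypothetical backup dir
        if (!fs.exists(backupDir)) {
            fs.mkdirs(backupDir);
        }
        // rename() moves the file within the same filesystem; it returns false on failure
        if (!fs.rename(processed, new Path(backupDir, processed.getName()))) {
            throw new IllegalStateException("could not move " + processed + " to " + backupDir);
        }
    }
}
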
@@ -46,11 +46,11 @@ public class CollectorWorker extends ReportingJob {
 	private final HttpClientParams clientParams;

 	public CollectorWorker(
 		final ApiDescriptor api,
 		final FileSystem fileSystem,
 		final MDStoreVersion mdStoreVersion,
 		final HttpClientParams clientParams,
 		final AggregatorReport report) {
 		super(report);
 		this.api = api;
 		this.fileSystem = fileSystem;

|
||||||
scheduleReport(counter);
|
scheduleReport(counter);
|
||||||
|
|
||||||
try (SequenceFile.Writer writer = SequenceFile
|
try (SequenceFile.Writer writer = SequenceFile
|
||||||
.createWriter(this.fileSystem.getConf(), SequenceFile.Writer.file(new Path(outputPath)), SequenceFile.Writer
|
.createWriter(
|
||||||
.keyClass(IntWritable.class), SequenceFile.Writer
|
this.fileSystem.getConf(), SequenceFile.Writer.file(new Path(outputPath)), SequenceFile.Writer
|
||||||
.valueClass(Text.class), SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK, new DeflateCodec()))) {
|
.keyClass(IntWritable.class),
|
||||||
|
SequenceFile.Writer
|
||||||
|
.valueClass(Text.class),
|
||||||
|
SequenceFile.Writer.compression(SequenceFile.CompressionType.BLOCK, new DeflateCodec()))) {
|
||||||
final IntWritable key = new IntWritable(counter.get());
|
final IntWritable key = new IntWritable(counter.get());
|
||||||
final Text value = new Text();
|
final Text value = new Text();
|
||||||
plugin
|
plugin
|
||||||
.collect(this.api, this.report)
|
.collect(this.api, this.report)
|
||||||
.forEach(content -> {
|
.forEach(content -> {
|
||||||
key.set(counter.getAndIncrement());
|
key.set(counter.getAndIncrement());
|
||||||
value.set(content);
|
value.set(content);
|
||||||
try {
|
try {
|
||||||
writer.append(key, value);
|
writer.append(key, value);
|
||||||
} catch (final Throwable e) {
|
} catch (final Throwable e) {
|
||||||
throw new RuntimeException(e);
|
throw new RuntimeException(e);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
} catch (final Throwable e) {
|
} catch (final Throwable e) {
|
||||||
this.report.put(e.getClass().getName(), e.getMessage());
|
this.report.put(e.getClass().getName(), e.getMessage());
|
||||||
throw new CollectorException(e);
|
throw new CollectorException(e);
|
||||||
|
@@ -112,36 +115,36 @@ public class CollectorWorker extends ReportingJob {
 	private CollectorPlugin getCollectorPlugin() throws UnknownCollectorPluginException {

 		switch (CollectorPlugin.NAME.valueOf(this.api.getProtocol())) {
 			case oai:
 				return new OaiCollectorPlugin(this.clientParams);
 			case rest_json2xml:
 				return new RestCollectorPlugin(this.clientParams);
 			case file:
 				return new FileCollectorPlugin(this.fileSystem);
 			case fileGzip:
 				return new FileGZipCollectorPlugin(this.fileSystem);
 			case baseDump:
 				return new BaseCollectorPlugin(this.fileSystem);
 			case gtr2Publications:
 				return new Gtr2PublicationsCollectorPlugin(this.clientParams);
 			case osfPreprints:
 				return new OsfPreprintsCollectorPlugin(this.clientParams);
 			case other:
 				final CollectorPlugin.NAME.OTHER_NAME plugin = Optional
 					.ofNullable(this.api.getParams().get("other_plugin_type"))
 					.map(CollectorPlugin.NAME.OTHER_NAME::valueOf)
 					.orElseThrow(() -> new IllegalArgumentException("invalid other_plugin_type"));

 				switch (plugin) {
 					case mdstore_mongodb_dump:
 						return new MongoDbDumpCollectorPlugin(this.fileSystem);
 					case mdstore_mongodb:
 						return new MDStoreCollectorPlugin();
 					default:
 						throw new UnknownCollectorPluginException("plugin is not managed: " + plugin);
 				}
 			default:
 				throw new UnknownCollectorPluginException("protocol is not managed: " + this.api.getProtocol());
 		}
 	}
 }

@@ -31,17 +31,19 @@ public class OsfPreprintsCollectorPlugin implements CollectorPlugin {
 		final String baseUrl = api.getBaseUrl();

 		final int pageSize = Optional
 			.ofNullable(api.getParams().get("pageSize"))
 			.filter(StringUtils::isNotBlank)
 			.map(s -> NumberUtils.toInt(s, PAGE_SIZE_VALUE_DEFAULT))
 			.orElse(PAGE_SIZE_VALUE_DEFAULT);

-		if (StringUtils.isBlank(baseUrl)) { throw new CollectorException("Param 'baseUrl' is null or empty"); }
+		if (StringUtils.isBlank(baseUrl)) {
+			throw new CollectorException("Param 'baseUrl' is null or empty");
+		}

 		final OsfPreprintsIterator it = new OsfPreprintsIterator(baseUrl, pageSize, getClientParams());

 		return StreamSupport
 			.stream(Spliterators.spliteratorUnknownSize(it, Spliterator.ORDERED), false);
 	}

 	public HttpClientParams getClientParams() {

@@ -34,9 +34,9 @@ public class OsfPreprintsIterator implements Iterator<String> {
 	private final Queue<String> recordQueue = new PriorityBlockingQueue<>();

 	public OsfPreprintsIterator(
 		final String baseUrl,
 		final int pageSize,
 		final HttpClientParams clientParams) {

 		this.clientParams = clientParams;
 		this.baseUrl = baseUrl;

|
||||||
@Override
|
@Override
|
||||||
public boolean hasNext() {
|
public boolean hasNext() {
|
||||||
synchronized (this.recordQueue) {
|
synchronized (this.recordQueue) {
|
||||||
while (this.recordQueue.isEmpty() && StringUtils.isNotBlank(this.currentUrl) && this.currentUrl.startsWith("http")) {
|
while (this.recordQueue.isEmpty() && StringUtils.isNotBlank(this.currentUrl)
|
||||||
|
&& this.currentUrl.startsWith("http")) {
|
||||||
try {
|
try {
|
||||||
this.currentUrl = downloadPage(this.currentUrl);
|
this.currentUrl = downloadPage(this.currentUrl);
|
||||||
} catch (final CollectorException e) {
|
} catch (final CollectorException e) {
|
||||||
|
@@ -63,7 +64,9 @@ public class OsfPreprintsIterator implements Iterator<String> {
 				}
 			}

-			if (!this.recordQueue.isEmpty()) { return true; }
+			if (!this.recordQueue.isEmpty()) {
+				return true;
+			}

 			return false;
 		}

@@ -112,7 +115,9 @@ public class OsfPreprintsIterator implements Iterator<String> {
 	}

 	private Document downloadUrl(final String url, final int attempt) throws CollectorException {
-		if (attempt > MAX_ATTEMPTS) { throw new CollectorException("Max Number of attempts reached, url:" + url); }
+		if (attempt > MAX_ATTEMPTS) {
+			throw new CollectorException("Max Number of attempts reached, url:" + url);
+		}

 		if (attempt > 0) {
 			final int delay = (attempt * 5000);
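
The downloadUrl() signature above takes the attempt number, and the visible `attempt * 5000` line suggests a linear backoff between retries capped by MAX_ATTEMPTS. An iterative sketch of that policy (fetch() is a placeholder for the real page download, MAX_ATTEMPTS an assumed value, and the loop form a simplification of the recursive original):

public class LinearBackoffDemo {
    static final int MAX_ATTEMPTS = 3; // assumed value for the sketch

    static String downloadWithRetry(String url) throws Exception {
        for (int attempt = 0; attempt <= MAX_ATTEMPTS; attempt++) {
            if (attempt > 0) {
                Thread.sleep(attempt * 5000L); // linear backoff: 5s, 10s, 15s...
            }
            try {
                return fetch(url);
            } catch (Exception e) {
                if (attempt == MAX_ATTEMPTS)
                    throw new Exception("Max Number of attempts reached, url:" + url, e);
            }
        }
        throw new IllegalStateException("unreachable");
    }

    static String fetch(String url) throws Exception {
        throw new UnsupportedOperationException("placeholder for the HTTP call");
    }
}
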
@@ -28,13 +28,19 @@
     "paramLongName": "dataciteInputPath",
     "paramDescription": "the path to get the input data from Datacite",
     "paramRequired": true
-  },{
+  },
+  {
     "paramName": "wip",
     "paramLongName": "webCrawlInputPath",
     "paramDescription": "the path to get the input data from Web Crawl",
     "paramRequired": true
-  }
-  ,
+  },
+  {
+    "paramName": "pub",
+    "paramLongName": "publisherInputPath",
+    "paramDescription": "the path to get the input data from publishers",
+    "paramRequired": true
+  },
   {
     "paramName": "o",
     "paramLongName": "outputPath",

@@ -35,5 +35,6 @@ crossrefInputPath=/data/bip-affiliations/crossref-data.json
 pubmedInputPath=/data/bip-affiliations/pubmed-data.json
 openapcInputPath=/data/bip-affiliations/openapc-data.json
 dataciteInputPath=/data/bip-affiliations/datacite-data.json
+webCrawlInputPath=/data/bip-affiliations/webCrawl/

 outputPath=/tmp/crossref-affiliations-output-v5

@@ -21,6 +21,10 @@
 		<name>webCrawlInputPath</name>
 		<description>the path where to find the inferred affiliation relations from webCrawl</description>
 	</property>
+	<property>
+		<name>publisherInputPath</name>
+		<description>the path where to find the inferred affiliation relations from publisher websites</description>
+	</property>
 	<property>
 		<name>outputPath</name>
 		<description>the path where to store the actionset</description>

@@ -117,6 +121,7 @@
 			<arg>--openapcInputPath</arg><arg>${openapcInputPath}</arg>
 			<arg>--dataciteInputPath</arg><arg>${dataciteInputPath}</arg>
 			<arg>--webCrawlInputPath</arg><arg>${webCrawlInputPath}</arg>
+			<arg>--publisherInputPath</arg><arg>${publisherInputPath}</arg>
 			<arg>--outputPath</arg><arg>${outputPath}</arg>
 		</spark>
 		<ok to="End"/>

@@ -24,12 +24,19 @@
     "paramLongName": "outputPath",
     "paramDescription": "the hdfs name node",
     "paramRequired": true
-  }, {
+  },
+  {
     "paramName": "nn",
     "paramLongName": "hdfsNameNode",
     "paramDescription": "the hdfs name node",
     "paramRequired": true
+  },
+  {
+    "paramName": "bp",
+    "paramLongName": "backupPath",
+    "paramDescription": "the hdfs path to move the OC data after the extraction",
+    "paramRequired": true
   }
 ]

@@ -129,6 +129,7 @@
 			</spark-opts>
 			<arg>--inputPath</arg><arg>${inputPath}/Extracted</arg>
 			<arg>--outputPath</arg><arg>${inputPath}/JSON</arg>
+			<arg>--backupPath</arg><arg>${inputPath}/backup</arg>
 			<arg>--delimiter</arg><arg>${delimiter}</arg>
 			<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
 		</spark>

@@ -16,10 +16,11 @@
     "paramLongName": "isSparkSessionManged",
     "paramDescription": "the hdfs name node",
     "paramRequired": false
-  },{
+  },
+  {
     "paramName": "nn",
     "paramLongName": "nameNode",
     "paramDescription": "the hdfs name node",
     "paramRequired": true
   }
 ]

@@ -24,7 +24,7 @@

 	<decision name="resume_from">
 		<switch>
-			<case to="download">${wf:conf('resumeFrom') eq 'DownloadDump'}</case>
+			<case to="reset_workingDir">${wf:conf('resumeFrom') eq 'DownloadDump'}</case>
 			<default to="create_actionset"/> <!-- first action to be done when downloadDump is to be performed -->
 		</switch>
 	</decision>

@@ -33,6 +33,14 @@
 		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
 	</kill>

+	<action name="reset_workingDir">
+		<fs>
+			<delete path="${workingDir}"/>
+			<mkdir path="${workingDir}"/>
+		</fs>
+		<ok to="download"/>
+		<error to="Kill"/>
+	</action>
 	<action name="download">
 		<shell xmlns="uri:oozie:shell-action:0.2">
 			<job-tracker>${jobTracker}</job-tracker>

@@ -1,4 +1,4 @@
 <workflow-app name="Transform_BioEntity_Workflow" xmlns="uri:oozie:workflow:0.5">
 	<parameters>
 		<property>
 			<name>sourcePath</name>

@@ -8,19 +8,40 @@
 			<name>database</name>
 			<description>the PDB Database Working Path</description>
 		</property>

 		<property>
-			<name>targetPath</name>
-			<description>the Target Working dir path</description>
+			<name>mdStoreOutputId</name>
+			<description>the identifier of the cleaned MDStore</description>
+		</property>
+		<property>
+			<name>mdStoreManagerURI</name>
+			<description>the path of the cleaned mdstore</description>
 		</property>
 	</parameters>

-	<start to="ConvertDB"/>
+	<start to="StartTransaction"/>

 	<kill name="Kill">
 		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
 	</kill>

+	<action name="StartTransaction">
+		<java>
+			<configuration>
+				<property>
+					<name>oozie.launcher.mapreduce.user.classpath.first</name>
+					<value>true</value>
+				</property>
+			</configuration>
+			<main-class>eu.dnetlib.dhp.aggregation.mdstore.MDStoreActionNode</main-class>
+			<arg>--action</arg><arg>NEW_VERSION</arg>
+			<arg>--mdStoreID</arg><arg>${mdStoreOutputId}</arg>
+			<arg>--mdStoreManagerURI</arg><arg>${mdStoreManagerURI}</arg>
+			<capture-output/>
+		</java>
+		<ok to="ConvertDB"/>
+		<error to="RollBack"/>
+	</action>
 	<action name="ConvertDB">
 		<spark xmlns="uri:oozie:spark-action:0.2">
 			<master>yarn</master>

@@ -41,11 +62,48 @@
 			<arg>--master</arg><arg>yarn</arg>
 			<arg>--dbPath</arg><arg>${sourcePath}</arg>
 			<arg>--database</arg><arg>${database}</arg>
-			<arg>--targetPath</arg><arg>${targetPath}</arg>
+			<arg>--mdstoreOutputVersion</arg><arg>${wf:actionData('StartTransaction')['mdStoreVersion']}</arg>
 		</spark>
-		<ok to="End"/>
-		<error to="Kill"/>
+		<ok to="CommitVersion"/>
+		<error to="RollBack"/>

 	</action>
-	<end name="End"/>
+	<action name="CommitVersion">
+		<java>
+			<configuration>
+				<property>
+					<name>oozie.launcher.mapreduce.user.classpath.first</name>
+					<value>true</value>
+				</property>
+			</configuration>
+			<main-class>eu.dnetlib.dhp.aggregation.mdstore.MDStoreActionNode</main-class>
+			<arg>--action</arg><arg>COMMIT</arg>
+			<arg>--namenode</arg><arg>${nameNode}</arg>
+			<arg>--mdStoreVersion</arg><arg>${wf:actionData('StartTransaction')['mdStoreVersion']}</arg>
+			<arg>--mdStoreManagerURI</arg><arg>${mdStoreManagerURI}</arg>
+		</java>
+		<ok to="End"/>
+		<error to="Kill"/>
+	</action>
+
+	<action name="RollBack">
+		<java>
+			<configuration>
+				<property>
+					<name>oozie.launcher.mapreduce.user.classpath.first</name>
+					<value>true</value>
+				</property>
+			</configuration>
+			<main-class>eu.dnetlib.dhp.aggregation.mdstore.MDStoreActionNode</main-class>
+			<arg>--action</arg><arg>ROLLBACK</arg>
+			<arg>--mdStoreVersion</arg><arg>${wf:actionData('StartTransaction')['mdStoreVersion']}</arg>
+			<arg>--mdStoreManagerURI</arg><arg>${mdStoreManagerURI}</arg>
+		</java>
+		<ok to="Kill"/>
+		<error to="Kill"/>
+	</action>
+
+	<end name="End"/>
+
 </workflow-app>

@@ -2,5 +2,5 @@
 	{"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
 	{"paramName":"db", "paramLongName":"database", "paramDescription": "should be PDB or UNIPROT", "paramRequired": true},
 	{"paramName":"p", "paramLongName":"dbPath", "paramDescription": "the path of the database to transform", "paramRequired": true},
-	{"paramName":"t", "paramLongName":"targetPath", "paramDescription": "the OAF target path ", "paramRequired": true}
+	{"paramName":"mo", "paramLongName":"mdstoreOutputVersion", "paramDescription": "the oaf path ", "paramRequired": true}
 ]

@@ -1,5 +1,20 @@
 [
-	{"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
-	{"paramName":"s", "paramLongName":"sourcePath","paramDescription": "the source Path", "paramRequired": true},
-	{"paramName":"t", "paramLongName":"targetPath","paramDescription": "the oaf path ", "paramRequired": true}
+	{
+		"paramName": "mt",
+		"paramLongName": "master",
+		"paramDescription": "should be local or yarn",
+		"paramRequired": true
+	},
+	{
+		"paramName": "s",
+		"paramLongName": "sourcePath",
+		"paramDescription": "the source Path",
+		"paramRequired": true
+	},
+	{
+		"paramName": "mo",
+		"paramLongName": "mdstoreOutputVersion",
+		"paramDescription": "the oaf path ",
+		"paramRequired": true
+	}
 ]

@@ -9,34 +9,26 @@
 			<description>the Working Path</description>
 		</property>
 		<property>
-			<name>targetPath</name>
-			<description>the OAF MDStore Path</description>
+			<name>mdStoreOutputId</name>
+			<description>the identifier of the cleaned MDStore</description>
 		</property>
 		<property>
-			<name>sparkDriverMemory</name>
-			<description>memory for driver process</description>
-		</property>
-		<property>
-			<name>sparkExecutorMemory</name>
-			<description>memory for individual executor</description>
-		</property>
-		<property>
-			<name>sparkExecutorCores</name>
-			<description>number of cores used by single executor</description>
+			<name>mdStoreManagerURI</name>
+			<description>the path of the cleaned mdstore</description>
 		</property>
 		<property>
 			<name>resumeFrom</name>
-			<value>DownloadEBILinks</value>
+			<value>CreateEBIDataSet</value>
 			<description>node to start</description>
 		</property>
 	</parameters>

-	<start to="resume_from"/>
+	<start to="StartTransaction"/>

 	<decision name="resume_from">
 		<switch>
 			<case to="DownloadEBILinks">${wf:conf('resumeFrom') eq 'DownloadEBILinks'}</case>
-			<case to="CreateEBIDataSet">${wf:conf('resumeFrom') eq 'CreateEBIDataSet'}</case>
+			<case to="StartTransaction">${wf:conf('resumeFrom') eq 'CreateEBIDataSet'}</case>
 			<default to="DownloadEBILinks"/>
 		</switch>
 	</decision>

@@ -77,9 +69,29 @@
 			<move source="${sourcePath}/ebi_links_dataset" target="${sourcePath}/ebi_links_dataset_old"/>
 			<move source="${workingPath}/links_final" target="${sourcePath}/ebi_links_dataset"/>
 		</fs>
-		<ok to="CreateEBIDataSet"/>
+		<ok to="StartTransaction"/>
 		<error to="Kill"/>
 	</action>

+	<action name="StartTransaction">
+		<java>
+			<configuration>
+				<property>
+					<name>oozie.launcher.mapreduce.user.classpath.first</name>
+					<value>true</value>
+				</property>
+			</configuration>
+			<main-class>eu.dnetlib.dhp.aggregation.mdstore.MDStoreActionNode</main-class>
+			<arg>--action</arg><arg>NEW_VERSION</arg>
+			<arg>--mdStoreID</arg><arg>${mdStoreOutputId}</arg>
+			<arg>--mdStoreManagerURI</arg><arg>${mdStoreManagerURI}</arg>
+			<capture-output/>
+		</java>
+		<ok to="CreateEBIDataSet"/>
+		<error to="RollBack"/>
+	</action>
+
 	<action name="CreateEBIDataSet">
 		<spark xmlns="uri:oozie:spark-action:0.2">
 			<master>yarn-cluster</master>

@ -95,11 +107,49 @@
|
||||||
${sparkExtraOPT}
|
${sparkExtraOPT}
|
||||||
</spark-opts>
|
</spark-opts>
|
||||||
<arg>--sourcePath</arg><arg>${sourcePath}/ebi_links_dataset</arg>
|
<arg>--sourcePath</arg><arg>${sourcePath}/ebi_links_dataset</arg>
|
||||||
<arg>--targetPath</arg><arg>${targetPath}</arg>
|
<arg>--mdstoreOutputVersion</arg><arg>${wf:actionData('StartTransaction')['mdStoreVersion']}</arg>
|
||||||
<arg>--master</arg><arg>yarn</arg>
|
<arg>--master</arg><arg>yarn</arg>
|
||||||
</spark>
|
</spark>
|
||||||
<ok to="End"/>
|
<ok to="End"/>
|
||||||
<error to="Kill"/>
|
<error to="Kill"/>
|
||||||
</action>
|
</action>
|
||||||
|
|
||||||
|
|
||||||
|
<action name="CommitVersion">
|
||||||
|
<java>
|
||||||
|
<configuration>
|
||||||
|
<property>
|
||||||
|
<name>oozie.launcher.mapreduce.user.classpath.first</name>
|
||||||
|
<value>true</value>
|
||||||
|
</property>
|
||||||
|
</configuration>
|
||||||
|
<main-class>eu.dnetlib.dhp.aggregation.mdstore.MDStoreActionNode</main-class>
|
||||||
|
<arg>--action</arg><arg>COMMIT</arg>
|
||||||
|
<arg>--namenode</arg><arg>${nameNode}</arg>
|
||||||
|
<arg>--mdStoreVersion</arg><arg>${wf:actionData('StartTransaction')['mdStoreVersion']}</arg>
|
||||||
|
<arg>--mdStoreManagerURI</arg><arg>${mdStoreManagerURI}</arg>
|
||||||
|
</java>
|
||||||
|
<ok to="End"/>
|
||||||
|
<error to="Kill"/>
|
||||||
|
</action>
|
||||||
|
|
||||||
|
<action name="RollBack">
|
||||||
|
<java>
|
||||||
|
<configuration>
|
||||||
|
<property>
|
||||||
|
<name>oozie.launcher.mapreduce.user.classpath.first</name>
|
||||||
|
<value>true</value>
|
||||||
|
</property>
|
||||||
|
</configuration>
|
||||||
|
<main-class>eu.dnetlib.dhp.aggregation.mdstore.MDStoreActionNode</main-class>
|
||||||
|
<arg>--action</arg><arg>ROLLBACK</arg>
|
||||||
|
<arg>--mdStoreVersion</arg><arg>${wf:actionData('StartTransaction')['mdStoreVersion']}</arg>
|
||||||
|
<arg>--mdStoreManagerURI</arg><arg>${mdStoreManagerURI}</arg>
|
||||||
|
</java>
|
||||||
|
<ok to="Kill"/>
|
||||||
|
<error to="Kill"/>
|
||||||
|
</action>
|
||||||
|
|
||||||
<end name="End"/>
|
<end name="End"/>
|
||||||
|
|
||||||
</workflow-app>
|
</workflow-app>
|
|
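The rewired workflow brackets the EBI Spark step inside an MDStore transaction: StartTransaction asks MDStoreActionNode for a NEW_VERSION and captures its output, CreateEBIDataSet writes into that version's HDFS path, and the new terminal actions commit or discard the version. A minimal sketch of how a job can decode the captured version, assuming the value passed via --mdstoreOutputVersion is a JSON object exposing an hdfsPath field (the real class is eu.dnetlib.dhp.schema.mdstore.MDStoreVersion, the jackson-module-scala mapper is an illustrative stand-in for the project's DHPUtils.MAPPER, and the "store"/"size" subfolder names are assumptions about what MDSTORE_DATA_PATH/MDSTORE_SIZE_PATH resolve to):

```scala
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule

// Illustrative stand-in for eu.dnetlib.dhp.schema.mdstore.MDStoreVersion.
case class MdStoreVersion(id: String, hdfsPath: String)

object MdStorePaths {
  private val mapper = new ObjectMapper().registerModule(DefaultScalaModule)

  // Decode the JSON passed via --mdstoreOutputVersion and derive the
  // data/size locations; "store" and "size" mirror what MDSTORE_DATA_PATH
  // and MDSTORE_SIZE_PATH are assumed to resolve to.
  def resolve(versionJson: String): (String, String) = {
    val v = mapper.readValue(versionJson, classOf[MdStoreVersion])
    (s"${v.hdfsPath}/store", s"${v.hdfsPath}/size")
  }
}
```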
@@ -332,7 +332,7 @@ case object Crossref2Oaf
     implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats

     //MAPPING Crossref DOI into PID
-    val doi: String = DoiCleaningRule.normalizeDoi((json \ "DOI").extract[String])
+    val doi: String = DoiCleaningRule.clean((json \ "DOI").extract[String])
     result.setPid(
       List(
         structuredProperty(
@@ -504,6 +504,24 @@ case object Crossref2Oaf
       )
     }

+    val is_review = json \ "relation" \ "is-review-of" \ "id"
+
+    if (is_review != JNothing) {
+      instance.setInstancetype(
+        OafMapperUtils.qualifier(
+          "0015",
+          "peerReviewed",
+          ModelConstants.DNET_REVIEW_LEVELS,
+          ModelConstants.DNET_REVIEW_LEVELS
+        )
+      )
+    }
+
+    if (doi.startsWith("10.3410") || doi.startsWith("10.12703"))
+      instance.setHostedby(
+        OafMapperUtils.keyValue(OafMapperUtils.createOpenaireId(10, "openaire____::H1Connect", true), "H1Connect")
+      )
+
     instance.setAccessright(
       decideAccessRight(instance.getLicense, result.getDateofacceptance.getValue)
     )
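The new block flags a Crossref record as peer reviewed when the relation/is-review-of branch carries an id. A self-contained json4s sketch of just that test, with field names taken from the diff (the qualifier wiring into the OAF instance is omitted):

```scala
import org.json4s._
import org.json4s.jackson.JsonMethods.parse

object ReviewCheck {
  // True when the Crossref JSON carries relation.is-review-of.id, the
  // condition the mapping above uses before setting the instance type
  // to "0015"/"peerReviewed".
  def isReview(record: String): Boolean =
    (parse(record) \ "relation" \ "is-review-of" \ "id") != JNothing
}
```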
@@ -655,7 +673,7 @@ case object Crossref2Oaf
     val doi = input.getString(0)
     val rorId = input.getString(1)

-    val pubId = s"50|${PidType.doi.toString.padTo(12, "_")}::${DoiCleaningRule.normalizeDoi(doi)}"
+    val pubId = s"50|${PidType.doi.toString.padTo(12, "_")}::${DoiCleaningRule.clean(doi)}"
     val affId = GenerateRorActionSetJob.calculateOpenaireId(rorId)

     val r: Relation = new Relation
@@ -231,7 +231,7 @@ object BioDBToOAF
   def uniprotToOAF(input: String): List[Oaf] = {
     implicit lazy val formats: DefaultFormats.type = org.json4s.DefaultFormats
     lazy val json = parse(input)
-    val pid = (json \ "pid").extract[String]
+    val pid = (json \ "pid").extract[String].trim()

     val d = new Dataset
@@ -2,12 +2,15 @@ package eu.dnetlib.dhp.sx.bio

 import eu.dnetlib.dhp.application.ArgumentApplicationParser
 import eu.dnetlib.dhp.collection.CollectionUtils
+import eu.dnetlib.dhp.common.Constants.{MDSTORE_DATA_PATH, MDSTORE_SIZE_PATH}
+import eu.dnetlib.dhp.schema.mdstore.MDStoreVersion
 import eu.dnetlib.dhp.schema.oaf.Oaf
 import eu.dnetlib.dhp.sx.bio.BioDBToOAF.ScholixResolved
 import org.apache.commons.io.IOUtils
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.{Encoder, Encoders, SparkSession}
 import org.slf4j.{Logger, LoggerFactory}
+import eu.dnetlib.dhp.utils.DHPUtils.{MAPPER, writeHdfsFile}

 object SparkTransformBioDatabaseToOAF {
@@ -25,8 +28,13 @@ object SparkTransformBioDatabaseToOAF

     val dbPath: String = parser.get("dbPath")
     log.info("dbPath: {}", database)
-    val targetPath: String = parser.get("targetPath")
-    log.info("targetPath: {}", database)
+    val mdstoreOutputVersion = parser.get("mdstoreOutputVersion")
+    log.info("mdstoreOutputVersion: {}", mdstoreOutputVersion)
+
+    val cleanedMdStoreVersion = MAPPER.readValue(mdstoreOutputVersion, classOf[MDStoreVersion])
+    val outputBasePath = cleanedMdStoreVersion.getHdfsPath
+    log.info("outputBasePath: {}", outputBasePath)

     val spark: SparkSession =
       SparkSession
@@ -43,24 +51,28 @@ object SparkTransformBioDatabaseToOAF
       case "UNIPROT" =>
         CollectionUtils.saveDataset(
           spark.createDataset(sc.textFile(dbPath).flatMap(i => BioDBToOAF.uniprotToOAF(i))),
-          targetPath
+          s"$outputBasePath/$MDSTORE_DATA_PATH"
         )
       case "PDB" =>
         CollectionUtils.saveDataset(
           spark.createDataset(sc.textFile(dbPath).flatMap(i => BioDBToOAF.pdbTOOaf(i))),
-          targetPath
+          s"$outputBasePath/$MDSTORE_DATA_PATH"
         )
       case "SCHOLIX" =>
         CollectionUtils.saveDataset(
           spark.read.load(dbPath).as[ScholixResolved].map(i => BioDBToOAF.scholixResolvedToOAF(i)),
-          targetPath
+          s"$outputBasePath/$MDSTORE_DATA_PATH"
         )
       case "CROSSREF_LINKS" =>
         CollectionUtils.saveDataset(
           spark.createDataset(sc.textFile(dbPath).map(i => BioDBToOAF.crossrefLinksToOaf(i))),
-          targetPath
+          s"$outputBasePath/$MDSTORE_DATA_PATH"
         )
     }
+
+    val df = spark.read.text(s"$outputBasePath/$MDSTORE_DATA_PATH")
+    val mdStoreSize = df.count
+    writeHdfsFile(spark.sparkContext.hadoopConfiguration, s"$mdStoreSize", s"$outputBasePath/$MDSTORE_SIZE_PATH")
   }

 }
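Both Spark jobs now finish by counting the records they just wrote and publishing that count next to the data, which is how the MDStore manager learns the size of the version before it is committed. A sketch of what a helper like DHPUtils.writeHdfsFile plausibly does, under the assumption that it simply overwrites a small text file on HDFS:

```scala
import java.nio.charset.StandardCharsets

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

object HdfsSizeFile {
  // Hypothetical stand-in for eu.dnetlib.dhp.utils.DHPUtils.writeHdfsFile:
  // persist the record count as a tiny text file at MDSTORE_SIZE_PATH.
  def writeHdfsFile(conf: Configuration, content: String, path: String): Unit = {
    val fs = FileSystem.get(conf)
    val out = fs.create(new Path(path), true) // overwrite any previous size file
    try out.write(content.getBytes(StandardCharsets.UTF_8))
    finally out.close()
  }
}
```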
@@ -9,6 +9,9 @@ import org.apache.commons.io.IOUtils
 import org.apache.spark.SparkConf
 import org.apache.spark.sql._
 import org.slf4j.{Logger, LoggerFactory}
+import eu.dnetlib.dhp.common.Constants.{MDSTORE_DATA_PATH, MDSTORE_SIZE_PATH}
+import eu.dnetlib.dhp.schema.mdstore.MDStoreVersion
+import eu.dnetlib.dhp.utils.DHPUtils.{MAPPER, writeHdfsFile}

 object SparkEBILinksToOaf {
@@ -32,8 +35,13 @@ object SparkEBILinksToOaf
     import spark.implicits._
     val sourcePath = parser.get("sourcePath")
     log.info(s"sourcePath -> $sourcePath")
-    val targetPath = parser.get("targetPath")
-    log.info(s"targetPath -> $targetPath")
+    val mdstoreOutputVersion = parser.get("mdstoreOutputVersion")
+    log.info("mdstoreOutputVersion: {}", mdstoreOutputVersion)
+
+    val cleanedMdStoreVersion = MAPPER.readValue(mdstoreOutputVersion, classOf[MDStoreVersion])
+    val outputBasePath = cleanedMdStoreVersion.getHdfsPath
+    log.info("outputBasePath: {}", outputBasePath)

     implicit val PMEncoder: Encoder[Oaf] = Encoders.kryo(classOf[Oaf])

     val ebLinks: Dataset[EBILinkItem] = spark.read
@@ -46,7 +54,10 @@
         .flatMap(j => BioDBToOAF.parse_ebi_links(j.links))
         .filter(p => BioDBToOAF.EBITargetLinksFilter(p))
         .flatMap(p => BioDBToOAF.convertEBILinksToOaf(p)),
-      targetPath
+      s"$outputBasePath/$MDSTORE_DATA_PATH"
     )
+    val df = spark.read.text(s"$outputBasePath/$MDSTORE_DATA_PATH")
+    val mdStoreSize = df.count
+    writeHdfsFile(spark.sparkContext.hadoopConfiguration, s"$mdStoreSize", s"$outputBasePath/$MDSTORE_SIZE_PATH")
   }
 }
@@ -30,6 +30,7 @@ import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.utils.CleaningFunctions;
 import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
+import eu.dnetlib.dhp.schema.oaf.utils.PidCleaner;

 public class PrepareAffiliationRelationsTest {
@@ -74,21 +75,34 @@ public class PrepareAffiliationRelationsTest
     @Test
     void testMatch() throws Exception {

-        String crossrefAffiliationRelationPath = getClass()
+        String crossrefAffiliationRelationPathNew = getClass()
             .getResource("/eu/dnetlib/dhp/actionmanager/bipaffiliations/doi_to_ror.json")
             .getPath();

+        String crossrefAffiliationRelationPath = getClass()
+            .getResource("/eu/dnetlib/dhp/actionmanager/bipaffiliations/doi_to_ror_old.json")
+            .getPath();
+
+        String publisherAffiliationRelationPath = getClass()
+            .getResource("/eu/dnetlib/dhp/actionmanager/bipaffiliations/publishers")
+            .getPath();
+
+        String publisherAffiliationRelationOldPath = getClass()
+            .getResource("/eu/dnetlib/dhp/actionmanager/bipaffiliations/publichers_old")
+            .getPath();
+
         String outputPath = workingDir.toString() + "/actionSet";

         PrepareAffiliationRelations
             .main(
                 new String[] {
                     "-isSparkSessionManaged", Boolean.FALSE.toString(),
-                    "-crossrefInputPath", crossrefAffiliationRelationPath,
+                    "-crossrefInputPath", crossrefAffiliationRelationPathNew,
                     "-pubmedInputPath", crossrefAffiliationRelationPath,
-                    "-openapcInputPath", crossrefAffiliationRelationPath,
+                    "-openapcInputPath", crossrefAffiliationRelationPathNew,
                     "-dataciteInputPath", crossrefAffiliationRelationPath,
                     "-webCrawlInputPath", crossrefAffiliationRelationPath,
+                    "-publisherInputPath", publisherAffiliationRelationOldPath,
                     "-outputPath", outputPath
                 });
@@ -99,13 +113,8 @@ public class PrepareAffiliationRelationsTest
             .map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
             .map(aa -> ((Relation) aa.getPayload()));

-        // for (Relation r : tmp.collect()) {
-        // System.out.println(
-        // r.getSource() + "\t" + r.getTarget() + "\t" + r.getRelType() + "\t" + r.getRelClass() + "\t" + r.getSubRelType() + "\t" + r.getValidationDate() + "\t" + r.getDataInfo().getTrust() + "\t" + r.getDataInfo().getInferred()
-        // );
-        // }
         // count the number of relations
-        assertEquals(120, tmp.count());
+        assertEquals(150, tmp.count()); // 18 + 24 * 3 + 30 * 2 = 150

         Dataset<Relation> dataset = spark.createDataset(tmp.rdd(), Encoders.bean(Relation.class));
         dataset.createOrReplaceTempView("result");
@@ -116,7 +125,7 @@
         // verify that we have equal number of bi-directional relations
         Assertions
             .assertEquals(
-                60, execVerification
+                75, execVerification
                     .filter(
                         "relClass='" + ModelConstants.HAS_AUTHOR_INSTITUTION + "'")
                     .collectAsList()
@@ -124,26 +133,56 @@

         Assertions
             .assertEquals(
-                60, execVerification
+                75, execVerification
                     .filter(
                         "relClass='" + ModelConstants.IS_AUTHOR_INSTITUTION_OF + "'")
                     .collectAsList()
                     .size());

         // check confidence value of a specific relation
-        String sourceDOI = "10.1061/(asce)0733-9399(2002)128:7(759)";
+        String sourceDOI = "10.1089/10872910260066679";

         final String sourceOpenaireId = ID_PREFIX
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", sourceDOI));
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", sourceDOI));

         Assertions
             .assertEquals(
-                "0.7071067812", execVerification
+                "1.0", execVerification
                     .filter(
                         "source='" + sourceOpenaireId + "'")
                     .collectAsList()
                     .get(0)
                     .getString(4));

+        final String publisherid = ID_PREFIX
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", "10.1089/10872910260066679"));
+        final String rorId = "20|ror_________::" + IdentifierFactory.md5("https://ror.org/05cf8a891");
+
+        Assertions
+            .assertEquals(
+                2, execVerification.filter("source = '" + publisherid + "' and target = '" + rorId + "'").count());
+
+        Assertions
+            .assertEquals(
+                1, execVerification
+                    .filter(
+                        "source = '" + ID_PREFIX
+                            + IdentifierFactory
+                                .md5(PidCleaner.normalizePidValue("doi", "10.1007/s00217-010-1268-9"))
+                            + "' and target = '" + "20|ror_________::"
+                            + IdentifierFactory.md5("https://ror.org/03265fv13") + "'")
+                    .count());
+
+        Assertions
+            .assertEquals(
+                3, execVerification
+                    .filter(
+                        "source = '" + ID_PREFIX
+                            + IdentifierFactory
+                                .md5(PidCleaner.normalizePidValue("doi", "10.1007/3-540-47984-8_14"))
+                            + "' and target = '" + "20|ror_________::"
+                            + IdentifierFactory.md5("https://ror.org/00a0n9e72") + "'")
+                    .count());
+
     }
 }
@@ -31,6 +31,7 @@ import eu.dnetlib.dhp.schema.oaf.Publication;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.utils.CleaningFunctions;
 import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
+import eu.dnetlib.dhp.schema.oaf.utils.PidCleaner;

 public class CreateOpenCitationsASTest {
@@ -280,17 +281,17 @@
     @Test
     void testRelationsSourceTargetCouple() throws Exception {
         final String doi1 = "50|doi_________::"
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1007/s10854-015-3684-x"));
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", "10.1007/s10854-015-3684-x"));
         final String doi2 = "50|doi_________::"
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1111/j.1551-2916.2008.02408.x"));
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", "10.1111/j.1551-2916.2008.02408.x"));
         final String doi3 = "50|doi_________::"
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1007/s10854-014-2114-9"));
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", "10.1007/s10854-014-2114-9"));
         final String doi4 = "50|doi_________::"
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1016/j.ceramint.2013.09.069"));
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", "10.1016/j.ceramint.2013.09.069"));
         final String doi5 = "50|doi_________::"
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1007/s10854-009-9913-4"));
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", "10.1007/s10854-009-9913-4"));
         final String doi6 = "50|doi_________::"
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1016/0038-1098(72)90370-5"));
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", "10.1016/0038-1098(72)90370-5"));

         String inputPath = getClass()
             .getResource(
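The tests build result identifiers as ID_PREFIX plus the md5 of the normalized DOI; the only change across these hunks is swapping CleaningFunctions for PidCleaner as the normalizer. A sketch of the shape of such an identifier, assuming normalization amounts to trimming and lower-casing (the real PidCleaner.normalizePidValue does more than this):

```scala
import java.security.MessageDigest

object OpenaireDoiId {
  // Hex md5 digest, standing in for IdentifierFactory.md5.
  private def md5(s: String): String =
    MessageDigest.getInstance("MD5")
      .digest(s.getBytes("UTF-8"))
      .map("%02x".format(_))
      .mkString

  // "50|doi_________::" + md5(normalized DOI), mirroring the pattern in the
  // tests; trim/lower-case is only an assumption about the normalizer.
  def apply(doi: String): String =
    "50|doi_________::" + md5(doi.trim.toLowerCase)
}
```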
@@ -77,13 +77,13 @@ public class RemapTest
         MapOCIdsInPids
             .main(
                 new String[] {
-                    "-isSparkSessionManged",
+                    "--isSparkSessionManged",
                     Boolean.FALSE.toString(),
-                    "-inputPath",
+                    "--inputPath",
                     inputPath,
-                    "-outputPath",
+                    "--outputPath",
                     workingDir.toString() + "/out/",
-                    "-nameNode", "input1;input2;input3;input4;input5"
+                    "--nameNode", "hdfs://localhost"
                 });

     }
@@ -28,6 +28,7 @@ import eu.dnetlib.dhp.schema.common.ModelConstants;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import eu.dnetlib.dhp.schema.oaf.utils.CleaningFunctions;
 import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
+import eu.dnetlib.dhp.schema.oaf.utils.PidCleaner;

 /**
  * @author miriam.baglioni
@@ -270,17 +271,17 @@
     @Test
     void testRelationsSourceTargetCouple() throws Exception {
         final String doi1 = "50|doi_________::"
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1007/s10854-015-3684-x"));
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", "10.1007/s10854-015-3684-x"));
         final String doi2 = "50|doi_________::"
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1111/j.1551-2916.2008.02408.x"));
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", "10.1111/j.1551-2916.2008.02408.x"));
         final String doi3 = "50|doi_________::"
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1007/s10854-014-2114-9"));
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", "10.1007/s10854-014-2114-9"));
         final String doi4 = "50|doi_________::"
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1016/j.ceramint.2013.09.069"));
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", "10.1016/j.ceramint.2013.09.069"));
         final String doi5 = "50|doi_________::"
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1007/s10854-009-9913-4"));
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", "10.1007/s10854-009-9913-4"));
         final String doi6 = "50|doi_________::"
-            + IdentifierFactory.md5(CleaningFunctions.normalizePidValue("doi", "10.1016/0038-1098(72)90370-5"));
+            + IdentifierFactory.md5(PidCleaner.normalizePidValue("doi", "10.1016/0038-1098(72)90370-5"));

         String inputPath = getClass()
             .getResource(
@@ -50,9 +50,10 @@ public class OsfPreprintsCollectorPluginTest
     @Test
     @Disabled
     void test_one() throws CollectorException {
-        this.plugin.collect(this.api, new AggregatorReport())
-            .limit(1)
-            .forEach(log::info);
+        this.plugin
+            .collect(this.api, new AggregatorReport())
+            .limit(1)
+            .forEach(log::info);
     }

     @Test
@@ -95,7 +96,8 @@
         final HttpConnector2 connector = new HttpConnector2();

         try {
-            final String res = connector.getInputSource("https://api.osf.io/v2/preprints/ydtzx/contributors/?format=json");
+            final String res = connector
+                .getInputSource("https://api.osf.io/v2/preprints/ydtzx/contributors/?format=json");
             System.out.println(res);
             fail();
         } catch (final Throwable e) {
@@ -1,9 +1,10 @@
-{"DOI":"10.1061\/(asce)0733-9399(2002)128:7(759)","Matchings":[{"RORid":"https:\/\/ror.org\/03yxnpp24","Confidence":0.7071067812},{"RORid":"https:\/\/ror.org\/01teme464","Confidence":0.89}]}
-{"DOI":"10.1105\/tpc.8.3.343","Matchings":[{"RORid":"https:\/\/ror.org\/02k40bc56","Confidence":0.7071067812}]}
-{"DOI":"10.1161\/01.cir.0000013305.01850.37","Matchings":[{"RORid":"https:\/\/ror.org\/00qjgza05","Confidence":1}]}
-{"DOI":"10.1142\/s021821650200186x","Matchings":[{"RORid":"https:\/\/ror.org\/035xkbk20","Confidence":1},{"RORid":"https:\/\/ror.org\/05apxxy63","Confidence":1}]}
-{"DOI":"10.1061\/(asce)0733-9372(2002)128:7(575)","Matchings":[{"RORid":"https:\/\/ror.org\/04j198w64","Confidence":0.82}]}
-{"DOI":"10.1061\/(asce)0733-9372(2002)128:7(588)","Matchings":[{"RORid":"https:\/\/ror.org\/03m8km719","Confidence":0.8660254038},{"RORid":"https:\/\/ror.org\/02aze4h65","Confidence":0.87}]}
-{"DOI":"10.1161\/hy0202.103001","Matchings":[{"RORid":"https:\/\/ror.org\/057xtrt18","Confidence":0.7071067812}]}
-{"DOI": "10.1080/13669877.2015.1042504", "Matchings": [{"Confidence": 1.0, "RORid": "https://ror.org/03265fv13"}]}
-{"DOI": "10.1007/3-540-47984-8_14", "Matchings": [{"Confidence": 1.0, "RORid": "https://ror.org/00a0n9e72"}]}
+{"DOI":"10.1021\/ac020069k","Matchings":[{"PID":"ROR","Value":"https:\/\/ror.org\/01f5ytq51","Status":"active","Confidence":1}]}
+{"DOI":"10.1161\/01.cir.0000013846.72805.7e","Matchings":[{"PID":"ROR","Value":"https:\/\/ror.org\/02pttbw34","Status":"active","Confidence":1}]}
+{"DOI":"10.1161\/hy02t2.102992","Matchings":[{"PID":"ROR","Value":"https:\/\/ror.org\/00qqv6244","Status":"active","Confidence":1},{"PID":"ROR","Value":"https:\/\/ror.org\/00p991c53","Status":"active","Confidence":1}]}
+{"DOI":"10.1126\/science.1073633","Matchings":[{"PID":"ROR","Value":"https:\/\/ror.org\/03xez1567","Status":"active","Confidence":1},{"PID":"ROR","Value":"https:\/\/ror.org\/006w34k90","Status":"active","Confidence":1}]}
+{"DOI":"10.1089\/10872910260066679","Matchings":[{"PID":"ROR","Value":"https:\/\/ror.org\/05cf8a891","Status":"active","Confidence":1}]}
+{"DOI":"10.1108\/02656719610116117","Matchings":[{"PID":"ROR","Value":"https:\/\/ror.org\/03mnm0t94","Status":"active","Confidence":1},{"PID":"ROR","Value":"https:\/\/ror.org\/007tn5k56","Status":"active","Confidence":1}]}
+{"DOI":"10.1080\/01443610050111986","Matchings":[{"PID":"ROR","Value":"https:\/\/ror.org\/001x4vz59","Status":"active","Confidence":1},{"PID":"ROR","Value":"https:\/\/ror.org\/01tmqtf75","Status":"active","Confidence":1}]}
+{"DOI":"10.1021\/cm020118+","Matchings":[{"PID":"ROR","Value":"https:\/\/ror.org\/02cf1je33","Confidence":1,"Status":"inactive"},{"PID":"ROR","Value":"https:\/\/ror.org\/01hvx5h04","Confidence":1,"Status":"active"}]}
+{"DOI":"10.1161\/hc1202.104524","Matchings":[{"PID":"ROR","Value":"https:\/\/ror.org\/040r8fr65","Status":"active","Confidence":1},{"PID":"ROR","Value":"https:\/\/ror.org\/04fctr677","Status":"active","Confidence":1}]}
+{"DOI":"10.1021\/ma011134f","Matchings":[{"PID":"ROR","Value":"https:\/\/ror.org\/04tj63d06","Status":"active","Confidence":1}]}
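The fixture change above illustrates the new affiliation-matching record layout: each entry in Matchings now carries an explicit PID type, Value, and Status instead of a bare RORid. Illustrative case classes for the two layouts, with field names copied from the JSON (the parsing wiring is omitted):

```scala
// Old layout: {"DOI": ..., "Matchings": [{"RORid": ..., "Confidence": ...}]}
case class OldMatching(RORid: String, Confidence: Double)

// New layout: {"DOI": ..., "Matchings":
//   [{"PID": "ROR", "Value": ..., "Status": "active", "Confidence": ...}]}
case class NewMatching(PID: String, Value: String, Status: String, Confidence: Double)

// A fixture line pairs one DOI with its list of matchings, in either layout.
case class DoiMatchings[M](DOI: String, Matchings: List[M])
```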
@@ -0,0 +1,9 @@
+{"DOI":"10.1061\/(asce)0733-9399(2002)128:7(759)","Matchings":[{"RORid":"https:\/\/ror.org\/03yxnpp24","Confidence":0.7071067812},{"RORid":"https:\/\/ror.org\/01teme464","Confidence":0.89}]}
+{"DOI":"10.1105\/tpc.8.3.343","Matchings":[{"RORid":"https:\/\/ror.org\/02k40bc56","Confidence":0.7071067812}]}
+{"DOI":"10.1161\/01.cir.0000013305.01850.37","Matchings":[{"RORid":"https:\/\/ror.org\/00qjgza05","Confidence":1}]}
+{"DOI":"10.1142\/s021821650200186x","Matchings":[{"RORid":"https:\/\/ror.org\/035xkbk20","Confidence":1},{"RORid":"https:\/\/ror.org\/05apxxy63","Confidence":1}]}
+{"DOI":"10.1061\/(asce)0733-9372(2002)128:7(575)","Matchings":[{"RORid":"https:\/\/ror.org\/04j198w64","Confidence":0.82}]}
+{"DOI":"10.1061\/(asce)0733-9372(2002)128:7(588)","Matchings":[{"RORid":"https:\/\/ror.org\/03m8km719","Confidence":0.8660254038},{"RORid":"https:\/\/ror.org\/02aze4h65","Confidence":0.87}]}
+{"DOI":"10.1161\/hy0202.103001","Matchings":[{"RORid":"https:\/\/ror.org\/057xtrt18","Confidence":0.7071067812}]}
+{"DOI": "10.1080/13669877.2015.1042504", "Matchings": [{"Confidence": 1.0, "RORid": "https://ror.org/03265fv13"}]}
+{"DOI": "https://doi.org/10.1007/3-540-47984-8_14", "Matchings": [{"Confidence": 1.0, "RORid": "https://ror.org/00a0n9e72"}]}
@@ -0,0 +1,6 @@
+{"DOI": "10.1007/s00217-010-1268-9", "Authors": [{"Name": {"Full": "Martin Zarnkow", "First": null, "Last": null}, "Raw_affiliations": ["TU M\u00fcnchen, Lehrstuhl f\u00fcr Brau- und Getr\u00e4nketechnologie"], "Organization_PIDs": []}, {"Name": {"Full": "Andrea Faltermaier", "First": null, "Last": null}, "Raw_affiliations": ["Lehrstuhl f\u00fcr Brau- und Getr\u00e4nketechnologie"], "Organization_PIDs": []}, {"Name": {"Full": "Werner Back", "First": null, "Last": null}, "Raw_affiliations": ["Lehrstuhl f\u00fcr Technologie der Brauerei I"], "Organization_PIDs": []}, {"Name": {"Full": "Martina Gastl", "First": null, "Last": null}, "Raw_affiliations": ["Lehrstuhl f\u00fcr Brau- und Getr\u00e4nketechnologie"], "Organization_PIDs": []}, {"Name": {"Full": "Elkek K. Arendt", "First": null, "Last": null}, "Raw_affiliations": ["University College Cork"], "Organization_PIDs": [{"RORid": "https://ror.org/03265fv13", "Confidence": 1}]}], "Organizations": [{"RORid": "https://ror.org/03265fv13", "Confidence": 1}]}
+{"DOI": "10.1007/BF01154707", "Authors": [{"Name": {"Full": "Buggy, M.", "First": null, "Last": null}, "Raw_affiliations": ["Department of Materials Science and Technology, University of Limerick, Limerick, Ireland"], "Organization_PIDs": [{"RORid": "https://ror.org/00a0n9e72", "Confidence": 1}]}, {"Name": {"Full": "Carew, A.", "First": null, "Last": null}, "Raw_affiliations": ["Department of Materials Science and Technology, University of Limerick, Limerick, Ireland"], "Organization_PIDs": [{"RORid": "https://ror.org/00a0n9e72", "Confidence": 1}]}], "Organizations": [{"RORid": "https://ror.org/00a0n9e72", "Confidence": 1}]}
+{"DOI": "10.1007/s10237-017-0974-7", "Authors": [{"Name": {"Full": "Donnacha J. McGrath", "First": null, "Last": null}, "Raw_affiliations": ["Biomechanics Research Centre (BMEC), Biomedical Engineering, College of Engineering and Informatics, NUI Galway, Galway, Ireland"], "Organization_PIDs": [{"RORid": "https://ror.org/03bea9k73", "Confidence": 1}]}, {"Name": {"Full": "Anja Lena Thiebes", "First": null, "Last": null}, "Raw_affiliations": ["Department of Biohybrid and Medical Textiles (BioTex), AME-Helmholtz Institute for Biomedical Engineering, ITA-Institut f\u00fcr Textiltechnik, RWTH Aachen University and at AMIBM Maastricht University, Maastricht, The Netherlands, Aachen, Germany"], "Organization_PIDs": [{"RORid": "https://ror.org/02jz4aj89", "Confidence": 0.82}, {"RORid": "https://ror.org/04xfq0f34", "Confidence": 0.87}]}, {"Name": {"Full": "Christian G. Cornelissen", "First": null, "Last": null}, "Raw_affiliations": ["Department of Biohybrid and Medical Textiles (BioTex), AME-Helmholtz Institute for Biomedical Engineering, ITA-Institut f\u00fcr Textiltechnik, RWTH Aachen University and at AMIBM Maastricht University, Maastricht, The Netherlands, Aachen, Germany"], "Organization_PIDs": [{"RORid": "https://ror.org/02jz4aj89", "Confidence": 0.82}, {"RORid": "https://ror.org/04xfq0f34", "Confidence": 0.87}]}, {"Name": {"Full": "Barry O\u2019Brien", "First": null, "Last": null}, "Raw_affiliations": ["Department for Internal Medicine \u2013 Section for Pneumology, Medical Faculty, RWTH Aachen University, Aachen, Germany"], "Organization_PIDs": [{"RORid": "https://ror.org/04xfq0f34", "Confidence": 1}]}, {"Name": {"Full": "Stefan Jockenhoevel", "First": null, "Last": null}, "Raw_affiliations": ["Biomechanics Research Centre (BMEC), Biomedical Engineering, College of Engineering and Informatics, NUI Galway, Galway, Ireland"], "Organization_PIDs": [{"RORid": "https://ror.org/03bea9k73", "Confidence": 1}]}, {"Name": {"Full": "Mark Bruzzi", "First": null, "Last": null}, "Raw_affiliations": ["Department of Biohybrid and Medical Textiles (BioTex), AME-Helmholtz Institute for Biomedical Engineering, ITA-Institut f\u00fcr Textiltechnik, RWTH Aachen University and at AMIBM Maastricht University, Maastricht, The Netherlands, Aachen, Germany"], "Organization_PIDs": [{"RORid": "https://ror.org/02jz4aj89", "Confidence": 0.82}, {"RORid": "https://ror.org/04xfq0f34", "Confidence": 0.87}]}, {"Name": {"Full": "Peter E. McHugh", "First": null, "Last": null}, "Raw_affiliations": ["Biomechanics Research Centre (BMEC), Biomedical Engineering, College of Engineering and Informatics, NUI Galway, Galway, Ireland"], "Organization_PIDs": [{"RORid": "https://ror.org/03bea9k73", "Confidence": 1}]}], "Organizations": [{"RORid": "https://ror.org/03bea9k73", "Confidence": 1}, {"RORid": "https://ror.org/02jz4aj89", "Confidence": 0.82}, {"RORid": "https://ror.org/04xfq0f34", "Confidence": 0.87}, {"RORid": "https://ror.org/04xfq0f34", "Confidence": 1}]}
+{"DOI": "10.1007/BF03168973", "Authors": [{"Name": {"Full": "Sheehan, G.", "First": null, "Last": null}, "Raw_affiliations": ["Dept of Infectious Diseases, Mater Misercordiae Hospital, Dublin 7"], "Organization_PIDs": []}, {"Name": {"Full": "Chew, N.", "First": null, "Last": null}, "Raw_affiliations": ["Dept of Infectious Diseases, Mater Misercordiae Hospital, Dublin 7"], "Organization_PIDs": []}], "Organizations": []}
+{"DOI": "10.1007/s00338-009-0480-1", "Authors": [{"Name": {"Full": "Gleason, D. F.", "First": null, "Last": null}, "Raw_affiliations": ["Department of Biology, Georgia Southern University, Statesboro, USA"], "Organization_PIDs": [{"RORid": "https://ror.org/04agmb972", "Confidence": 1}]}, {"Name": {"Full": "Danilowicz, B. S.", "First": null, "Last": null}, "Raw_affiliations": ["Department of Biology, Georgia Southern University, Statesboro, USA"], "Organization_PIDs": [{"RORid": "https://ror.org/04agmb972", "Confidence": 1}]}, {"Name": {"Full": "Nolan, C. J.", "First": null, "Last": null}, "Raw_affiliations": ["School of Biology and Environmental Science, University College Dublin, Dublin 4, Ireland"], "Organization_PIDs": [{"RORid": "https://ror.org/05m7pjf47", "Confidence": 1}]}], "Organizations": [{"RORid": "https://ror.org/04agmb972", "Confidence": 1}, {"RORid": "https://ror.org/05m7pjf47", "Confidence": 1}]}
+{"DOI": "10.1007/s10993-010-9187-y", "Authors": [{"Name": {"Full": "Martin Howard", "First": null, "Last": null}, "Raw_affiliations": ["University College Cork"], "Organization_PIDs": [{"RORid": "https://ror.org/03265fv13", "Confidence": 1}]}], "Organizations": [{"RORid": "https://ror.org/03265fv13", "Confidence": 1}]}
@@ -0,0 +1,6 @@
+{"DOI": "10.1007/s00217-010-1268-9", "Authors": [{"Name": {"Full": "Martin Zarnkow", "First": null, "Last": null}, "Raw_affiliations": ["TU M\u00fcnchen, Lehrstuhl f\u00fcr Brau- und Getr\u00e4nketechnologie"], "Organization_PIDs": []}, {"Name": {"Full": "Andrea Faltermaier", "First": null, "Last": null}, "Raw_affiliations": ["Lehrstuhl f\u00fcr Brau- und Getr\u00e4nketechnologie"], "Organization_PIDs": []}, {"Name": {"Full": "Werner Back", "First": null, "Last": null}, "Raw_affiliations": ["Lehrstuhl f\u00fcr Technologie der Brauerei I"], "Organization_PIDs": []}, {"Name": {"Full": "Martina Gastl", "First": null, "Last": null}, "Raw_affiliations": ["Lehrstuhl f\u00fcr Brau- und Getr\u00e4nketechnologie"], "Organization_PIDs": []}, {"Name": {"Full": "Elkek K. Arendt", "First": null, "Last": null}, "Raw_affiliations": ["University College Cork"], "Organization_PIDs": [{"Value": "https://ror.org/03265fv13", "Confidence": 1}]}], "Organizations": [{"Provenance":"AffRo","PID":"ROR","Status":"active","Value": "https://ror.org/03265fv13", "Confidence": 1}]}
+{"DOI": "10.1007/BF01154707", "Authors": [{"Name": {"Full": "Buggy, M.", "First": null, "Last": null}, "Raw_affiliations": ["Department of Materials Science and Technology, University of Limerick, Limerick, Ireland"], "Organization_PIDs": [{"Value": "https://ror.org/00a0n9e72", "Confidence": 1}]}, {"Name": {"Full": "Carew, A.", "First": null, "Last": null}, "Raw_affiliations": ["Department of Materials Science and Technology, University of Limerick, Limerick, Ireland"], "Organization_PIDs": [{"Value": "https://ror.org/00a0n9e72", "Confidence": 1}]}], "Organizations": [{"Provenance":"AffRo","PID":"ROR","Status":"active","Value": "https://ror.org/00a0n9e72", "Confidence": 1}]}
+{"DOI": "10.1007/s10237-017-0974-7", "Authors": [{"Name": {"Full": "Donnacha J. McGrath", "First": null, "Last": null}, "Raw_affiliations": ["Biomechanics Research Centre (BMEC), Biomedical Engineering, College of Engineering and Informatics, NUI Galway, Galway, Ireland"], "Organization_PIDs": [{"Value": "https://ror.org/03bea9k73", "Confidence": 1}]}, {"Name": {"Full": "Anja Lena Thiebes", "First": null, "Last": null}, "Raw_affiliations": ["Department of Biohybrid and Medical Textiles (BioTex), AME-Helmholtz Institute for Biomedical Engineering, ITA-Institut f\u00fcr Textiltechnik, RWTH Aachen University and at AMIBM Maastricht University, Maastricht, The Netherlands, Aachen, Germany"], "Organization_PIDs": [{"Value": "https://ror.org/02jz4aj89", "Confidence": 0.82}, {"Value": "https://ror.org/04xfq0f34", "Confidence": 0.87}]}, {"Name": {"Full": "Christian G. Cornelissen", "First": null, "Last": null}, "Raw_affiliations": ["Department of Biohybrid and Medical Textiles (BioTex), AME-Helmholtz Institute for Biomedical Engineering, ITA-Institut f\u00fcr Textiltechnik, RWTH Aachen University and at AMIBM Maastricht University, Maastricht, The Netherlands, Aachen, Germany"], "Organization_PIDs": [{"Value": "https://ror.org/02jz4aj89", "Confidence": 0.82}, {"Value": "https://ror.org/04xfq0f34", "Confidence": 0.87}]}, {"Name": {"Full": "Barry O\u2019Brien", "First": null, "Last": null}, "Raw_affiliations": ["Department for Internal Medicine \u2013 Section for Pneumology, Medical Faculty, RWTH Aachen University, Aachen, Germany"], "Organization_PIDs": [{"Value": "https://ror.org/04xfq0f34", "Confidence": 1}]}, {"Name": {"Full": "Stefan Jockenhoevel", "First": null, "Last": null}, "Raw_affiliations": ["Biomechanics Research Centre (BMEC), Biomedical Engineering, College of Engineering and Informatics, NUI Galway, Galway, Ireland"], "Organization_PIDs": [{"Value": "https://ror.org/03bea9k73", "Confidence": 1}]}, {"Name": {"Full": "Mark Bruzzi", "First": null, "Last": null}, "Raw_affiliations": ["Department of Biohybrid and Medical Textiles (BioTex), AME-Helmholtz Institute for Biomedical Engineering, ITA-Institut f\u00fcr Textiltechnik, RWTH Aachen University and at AMIBM Maastricht University, Maastricht, The Netherlands, Aachen, Germany"], "Organization_PIDs": [{"Value": "https://ror.org/02jz4aj89", "Confidence": 0.82}, {"Value": "https://ror.org/04xfq0f34", "Confidence": 0.87}]}, {"Name": {"Full": "Peter E. McHugh", "First": null, "Last": null}, "Raw_affiliations": ["Biomechanics Research Centre (BMEC), Biomedical Engineering, College of Engineering and Informatics, NUI Galway, Galway, Ireland"], "Organization_PIDs": [{"Value": "https://ror.org/03bea9k73", "Confidence": 1}]}], "Organizations": [{"Provenance":"AffRo","PID":"ROR","Status":"active","Value": "https://ror.org/03bea9k73", "Confidence": 1}, {"Provenance":"AffRo","PID":"ROR","Status":"active","Value": "https://ror.org/02jz4aj89", "Confidence": 0.82}, {"Provenance":"AffRo","PID":"ROR","Status":"active","Value": "https://ror.org/04xfq0f34", "Confidence": 0.87}, {"Provenance":"AffRo","PID":"ROR","Status":"active","Value": "https://ror.org/04xfq0f34", "Confidence": 1}]}
+{"DOI": "10.1007/BF03168973", "Authors": [{"Name": {"Full": "Sheehan, G.", "First": null, "Last": null}, "Raw_affiliations": ["Dept of Infectious Diseases, Mater Misercordiae Hospital, Dublin 7"], "Organization_PIDs": []}, {"Name": {"Full": "Chew, N.", "First": null, "Last": null}, "Raw_affiliations": ["Dept of Infectious Diseases, Mater Misercordiae Hospital, Dublin 7"], "Organization_PIDs": []}], "Organizations": []}
+{"DOI": "10.1007/s00338-009-0480-1", "Authors": [{"Name": {"Full": "Gleason, D. F.", "First": null, "Last": null}, "Raw_affiliations": ["Department of Biology, Georgia Southern University, Statesboro, USA"], "Organization_PIDs": [{"Value": "https://ror.org/04agmb972", "Confidence": 1}]}, {"Name": {"Full": "Danilowicz, B. S.", "First": null, "Last": null}, "Raw_affiliations": ["Department of Biology, Georgia Southern University, Statesboro, USA"], "Organization_PIDs": [{"Value": "https://ror.org/04agmb972", "Confidence": 1}]}, {"Name": {"Full": "Nolan, C. J.", "First": null, "Last": null}, "Raw_affiliations": ["School of Biology and Environmental Science, University College Dublin, Dublin 4, Ireland"], "Organization_PIDs": [{"Value": "https://ror.org/05m7pjf47", "Confidence": 1}]}], "Organizations": [{"Provenance":"AffRo","PID":"ROR","Status":"active","Value": "https://ror.org/04agmb972", "Confidence": 1}, {"Provenance":"AffRo","PID":"ROR","Status":"active","Value": "https://ror.org/05m7pjf47", "Confidence": 1}]}
+{"DOI": "10.1007/s10993-010-9187-y", "Authors": [{"Name": {"Full": "Martin Howard", "First": null, "Last": null}, "Raw_affiliations": ["University College Cork"], "Organization_PIDs": [{"Value": "https://ror.org/03265fv13", "Confidence": 1}]}], "Organizations": [{"PID":"ROR","Status":"active","Value": "https://ror.org/03265fv13", "Confidence": 1}]}
@ -1,15 +1,44 @@
|
||||||
{"pdb": "1CW0", "title": "crystal structure analysis of very short patch repair (vsr) endonuclease in complex with a duplex dna", "authors": ["S.E.Tsutakawa", "H.Jingami", "K.Morikawa"], "doi": "10.1016/S0092-8674(00)81550-0", "pmid": "10612397"}
|
{"classification": "Signaling protein", "pdb": "5NM4", "deposition_date": "2017-04-05", "title": "A2a adenosine receptor room-temperature structure determined by serial Femtosecond crystallography", "Keywords": ["Oom-temperature", " serial crystallography", " signaling protein"], "authors": ["T.weinert", "R.cheng", "D.james", "D.gashi", "P.nogly", "K.jaeger", "M.hennig", "", "J.standfuss"], "pmid": "28912485", "doi": "10.1038/S41467-017-00630-4"}
|
||||||
{"pdb": "2CWW", "title": "crystal structure of thermus thermophilus ttha1280, a putative sam- dependent rna methyltransferase, in complex with s-adenosyl-l- homocysteine", "authors": ["A.A.Pioszak", "K.Murayama", "N.Nakagawa", "A.Ebihara", "S.Kuramitsu", "M.Shirouzu", "S.Yokoyama", "Riken Structural Genomics/proteomics Initiative (Rsgi)"], "doi": "10.1107/S1744309105029842", "pmid": "16511182"}
|
{"classification": "Oxidoreductase/oxidoreductase inhibitor", "pdb": "4KN3", "deposition_date": "2013-05-08", "title": "Structure of the y34ns91g double mutant of dehaloperoxidase from Amphitrite ornata with 2,4,6-trichlorophenol", "Keywords": ["Lobin", " oxygen storage", " peroxidase", " oxidoreductase", " oxidoreductase-", "Oxidoreductase inhibitor complex"], "authors": ["C.wang", "L.lovelace", "L.lebioda"], "pmid": "23952341", "doi": "10.1021/BI400627W"}
|
||||||
{"pdb": "6CWE", "title": "structure of alpha-gsa[8,6p] bound by cd1d and in complex with the va14vb8.2 tcr", "authors": ["J.Wang", "D.Zajonc"], "doi": null, "pmid": null}
|
{"classification": "Transport protein", "pdb": "8HKM", "deposition_date": "2022-11-27", "title": "Ion channel", "Keywords": ["On channel", " transport protein"], "authors": ["D.h.jiang", "J.t.zhang"], "pmid": "37494189", "doi": "10.1016/J.CELREP.2023.112858"}
|
||||||
{"pdb": "5CWS", "title": "crystal structure of the intact chaetomium thermophilum nsp1-nup49- nup57 channel nucleoporin heterotrimer bound to its nic96 nuclear pore complex attachment site", "authors": ["C.J.Bley", "S.Petrovic", "M.Paduch", "V.Lu", "A.A.Kossiakoff", "A.Hoelz"], "doi": "10.1126/SCIENCE.AAC9176", "pmid": "26316600"}
|
{"classification": "Signaling protein", "pdb": "6JT1", "deposition_date": "2019-04-08", "title": "Structure of human soluble guanylate cyclase in the heme oxidised State", "Keywords": ["Oluble guanylate cyclase", " signaling protein"], "authors": ["L.chen", "Y.kang", "R.liu", "J.-x.wu"], "pmid": "31514202", "doi": "10.1038/S41586-019-1584-6"}
|
||||||
{"pdb": "5CWE", "title": "structure of cyp107l2 from streptomyces avermitilis with lauric acid", "authors": ["T.-V.Pham", "S.-H.Han", "J.-H.Kim", "D.-H.Kim", "L.-W.Kang"], "doi": null, "pmid": null}
|
{"classification": "Immune system", "pdb": "7OW6", "deposition_date": "2021-06-16", "title": "Crystal structure of a tcr in complex with hla-a*11:01 bound to kras G12d peptide (vvvgadgvgk)", "Keywords": ["La", " kras", " tcr", " immune system"], "authors": ["V.karuppiah", "R.a.robinson"], "doi": "10.1038/S41467-022-32811-1"}
|
||||||
{"pdb": "7CW4", "title": "acetyl-coa acetyltransferase from bacillus cereus atcc 14579", "authors": ["J.Hong", "K.J.Kim"], "doi": "10.1016/J.BBRC.2020.09.048", "pmid": "32972748"}
|
{"classification": "Biosynthetic protein", "pdb": "5EQ8", "deposition_date": "2015-11-12", "title": "Crystal structure of medicago truncatula histidinol-phosphate Phosphatase (mthpp) in complex with l-histidinol", "Keywords": ["Istidine biosynthesis", " metabolic pathways", " dimer", " plant", "", "Biosynthetic protein"], "authors": ["M.ruszkowski", "Z.dauter"], "pmid": "26994138", "doi": "10.1074/JBC.M115.708727"}
|
||||||
{"pdb": "2CWP", "title": "crystal structure of metrs related protein from pyrococcus horikoshii", "authors": ["K.Murayama", "M.Kato-Murayama", "M.Shirouzu", "S.Yokoyama", "Riken StructuralGenomics/proteomics Initiative (Rsgi)"], "doi": null, "pmid": null}
|
{"classification": "De novo protein", "pdb": "8CWA", "deposition_date": "2022-05-18", "title": "Solution nmr structure of 8-residue rosetta-designed cyclic peptide D8.21 in cdcl3 with cis/trans switching (tc conformation, 53%)", "Keywords": ["Yclic peptide", " non natural amino acids", " cis/trans", " switch peptides", "", "De novo design", "Membrane permeability", "De novo protein"], "authors": ["T.a.ramelot", "R.tejero", "G.t.montelione"], "pmid": "36041435", "doi": "10.1016/J.CELL.2022.07.019"}
|
||||||
{"pdb": "2CW7", "title": "crystal structure of intein homing endonuclease ii", "authors": ["H.Matsumura", "H.Takahashi", "T.Inoue", "H.Hashimoto", "M.Nishioka", "S.Fujiwara", "M.Takagi", "T.Imanaka", "Y.Kai"], "doi": "10.1002/PROT.20858", "pmid": "16493661"}
|
{"classification": "Hydrolase", "pdb": "3R6M", "deposition_date": "2011-03-21", "title": "Crystal structure of vibrio parahaemolyticus yeaz", "Keywords": ["Ctin/hsp70 nucleotide-binding fold", " bacterial resuscitation", " viable", "But non-culturable state", "Resuscitation promoting factor", "Ygjd", "", "Yjee", "Vibrio parahaemolyticus", "Hydrolase"], "authors": ["A.roujeinikova", "I.aydin"], "pmid": "21858042", "doi": "10.1371/JOURNAL.PONE.0023245"}
|
||||||
{"pdb": "1CWU", "title": "brassica napus enoyl acp reductase a138g mutant complexed with nad+ and thienodiazaborine", "authors": ["A.Roujeinikova", "J.B.Rafferty", "D.W.Rice"], "doi": "10.1074/JBC.274.43.30811", "pmid": "10521472"}
|
{"classification": "Hydrolase", "pdb": "2W5J", "deposition_date": "2008-12-10", "title": "Structure of the c14-rotor ring of the proton translocating Chloroplast atp synthase", "Keywords": ["Ydrolase", " chloroplast", " atp synthase", " lipid-binding", " cf(0)", " membrane", "", "Transport", "Formylation", "Energy transduction", "Hydrogen ion transport", "", "Ion transport", "Transmembrane", "Membrane protein"], "authors": ["M.vollmar", "D.schlieper", "M.winn", "C.buechner", "G.groth"], "pmid": "19423706", "doi": "10.1074/JBC.M109.006916"}
|
||||||
{"pdb": "3CWN", "title": "escherichia coli transaldolase b mutant f178y", "authors": ["T.Sandalova", "G.Schneider", "A.Samland"], "doi": "10.1074/JBC.M803184200", "pmid": "18687684"}
|
{"classification": "De novo protein", "pdb": "4GLU", "deposition_date": "2012-08-14", "title": "Crystal structure of the mirror image form of vegf-a", "Keywords": ["-protein", " covalent dimer", " cysteine knot protein", " growth factor", " de", "Novo protein"], "authors": ["K.mandal", "M.uppalapati", "D.ault-riche", "J.kenney", "J.lowitz", "S.sidhu", "", "S.b.h.kent"], "pmid": "22927390", "doi": "10.1073/PNAS.1210483109"}
|
||||||
{"pdb": "1CWL", "title": "human cyclophilin a complexed with 4 4-hydroxy-meleu cyclosporin", "authors": ["V.Mikol", "J.Kallen", "P.Taylor", "M.D.Walkinshaw"], "doi": "10.1006/JMBI.1998.2108", "pmid": "9769216"}
|
{"classification": "Hydrolase/hydrolase inhibitor", "pdb": "3WYL", "deposition_date": "2014-09-01", "title": "Crystal structure of the catalytic domain of pde10a complexed with 5- Methoxy-3-(1-phenyl-1h-pyrazol-5-yl)-1-(3-(trifluoromethyl)phenyl) Pyridazin-4(1h)-one", "Keywords": ["Ydrolase-hydrolase inhibitor complex"], "authors": ["H.oki", "Y.hayano"], "pmid": "25384088", "doi": "10.1021/JM5013648"}
|
||||||
{"pdb": "3CW2", "title": "crystal structure of the intact archaeal translation initiation factor 2 from sulfolobus solfataricus .", "authors": ["E.A.Stolboushkina", "S.V.Nikonov", "A.D.Nikulin", "U.Blaesi", "D.J.Manstein", "R.V.Fedorov", "M.B.Garber", "O.S.Nikonov"], "doi": "10.1016/J.JMB.2008.07.039", "pmid": "18675278"}
|
{"classification": "Isomerase", "pdb": "5BOR", "deposition_date": "2015-05-27", "title": "Structure of acetobacter aceti pure-s57c, sulfonate form", "Keywords": ["Cidophile", " pure", " purine biosynthesis", " isomerase"], "authors": ["K.l.sullivan", "T.j.kappock"]}
|
||||||
{"pdb": "3CW9", "title": "4-chlorobenzoyl-coa ligase/synthetase in the thioester-forming conformation, bound to 4-chlorophenacyl-coa", "authors": ["A.S.Reger", "J.Cao", "R.Wu", "D.Dunaway-Mariano", "A.M.Gulick"], "doi": "10.1021/BI800696Y", "pmid": "18620418"}
|
{"classification": "Hydrolase", "pdb": "1X0C", "deposition_date": "2005-03-17", "title": "Improved crystal structure of isopullulanase from aspergillus niger Atcc 9642", "Keywords": ["Ullulan", " glycoside hydrolase family 49", " glycoprotein", " hydrolase"], "authors": ["M.mizuno", "T.tonozuka", "A.yamamura", "Y.miyasaka", "H.akeboshi", "S.kamitori", "", "A.nishikawa", "Y.sakano"], "pmid": "18155243", "doi": "10.1016/J.JMB.2007.11.098"}
|
||||||
{"pdb": "3CWU", "title": "crystal structure of an alka host/guest complex 2'-fluoro-2'-deoxy-1, n6-ethenoadenine:thymine base pair", "authors": ["B.R.Bowman", "S.Lee", "S.Wang", "G.L.Verdine"], "doi": "10.1016/J.STR.2008.04.012", "pmid": "18682218"}
|
{"classification": "Oxidoreductase", "pdb": "7CUP", "deposition_date": "2020-08-23", "title": "Structure of 2,5-dihydroxypridine dioxygenase from pseudomonas putida Kt2440", "Keywords": ["On-heme dioxygenase", " oxidoreductase"], "authors": ["G.q.liu", "H.z.tang"]}
|
||||||
{"pdb": "5CWF", "title": "crystal structure of de novo designed helical repeat protein dhr8", "authors": ["G.Bhabha", "D.C.Ekiert"], "doi": "10.1038/NATURE16162", "pmid": "26675729"}
|
{"classification": "Ligase", "pdb": "1VCN", "deposition_date": "2004-03-10", "title": "Crystal structure of t.th. hb8 ctp synthetase complex with sulfate Anion", "Keywords": ["Etramer", " riken structural genomics/proteomics initiative", " rsgi", "", "Structural genomics", "Ligase"], "authors": ["M.goto", "Riken structural genomics/proteomics initiative (rsgi)"], "pmid": "15296735", "doi": "10.1016/J.STR.2004.05.013"}
|
||||||
|
{"classification": "Transferase/transferase inhibitor", "pdb": "6C9V", "deposition_date": "2018-01-28", "title": "Mycobacterium tuberculosis adenosine kinase bound to (2r,3s,4r,5r)-2- (hydroxymethyl)-5-(6-(4-phenylpiperazin-1-yl)-9h-purin-9-yl) Tetrahydrofuran-3,4-diol", "Keywords": ["Ucleoside analog", " complex", " inhibitor", " structural genomics", " psi-2", "", "Protein structure initiative", "Tb structural genomics consortium", "", "Tbsgc", "Transferase-transferase inhibitor complex"], "authors": ["R.a.crespo", "Tb structural genomics consortium (tbsgc)"], "pmid": "31002508", "doi": "10.1021/ACS.JMEDCHEM.9B00020"}
|
||||||
|
{"classification": "De novo protein", "pdb": "4LPY", "deposition_date": "2013-07-16", "title": "Crystal structure of tencon variant g10", "Keywords": ["Ibronectin type iii fold", " alternate scaffold", " de novo protein"], "authors": ["A.teplyakov", "G.obmolova", "G.l.gilliland"], "pmid": "24375666", "doi": "10.1002/PROT.24502"}
|
||||||
|
{"classification": "Isomerase", "pdb": "2Y88", "deposition_date": "2011-02-03", "title": "Crystal structure of mycobacterium tuberculosis phosphoribosyl Isomerase (variant d11n) with bound prfar", "Keywords": ["Romatic amino acid biosynthesis", " isomerase", " tim-barrel", " histidine", "Biosynthesis", "Tryptophan biosynthesis"], "authors": ["J.kuper", "A.v.due", "A.geerlof", "M.wilmanns"], "pmid": "21321225", "doi": "10.1073/PNAS.1015996108"}
|
||||||
|
{"classification": "Unknown function", "pdb": "1SR0", "deposition_date": "2004-03-22", "title": "Crystal structure of signalling protein from sheep(sps-40) at 3.0a Resolution using crystal grown in the presence of polysaccharides", "Keywords": ["Ignalling protein", " involution", " unknown function"], "authors": ["D.b.srivastava", "A.s.ethayathulla", "N.singh", "J.kumar", "S.sharma", "T.p.singh"]}
|
||||||
|
{"classification": "Dna binding protein", "pdb": "3RH2", "deposition_date": "2011-04-11", "title": "Crystal structure of a tetr-like transcriptional regulator (sama_0099) From shewanella amazonensis sb2b at 2.42 a resolution", "Keywords": ["Na/rna-binding 3-helical bundle", " structural genomics", " joint center", "For structural genomics", "Jcsg", "Protein structure initiative", "Psi-", "Biology", "Dna binding protein"], "authors": ["Joint center for structural genomics (jcsg)"]}
|
||||||
|
{"classification": "Transferase", "pdb": "2WK5", "deposition_date": "2009-06-05", "title": "Structural features of native human thymidine phosphorylase And in complex with 5-iodouracil", "Keywords": ["Lycosyltransferase", " developmental protein", " angiogenesis", "", "5-iodouracil", "Growth factor", "Enzyme kinetics", "", "Differentiation", "Disease mutation", "Thymidine", "Phosphorylase", "Chemotaxis", "Transferase", "Mutagenesis", "", "Polymorphism"], "authors": ["E.mitsiki", "A.c.papageorgiou", "S.iyer", "N.thiyagarajan", "S.h.prior", "", "D.sleep", "C.finnis", "K.r.acharya"], "pmid": "19555658", "doi": "10.1016/J.BBRC.2009.06.104"}
|
||||||
|
{"classification": "Hydrolase", "pdb": "3P9Y", "deposition_date": "2010-10-18", "title": "Crystal structure of the drosophila melanogaster ssu72-pctd complex", "Keywords": ["Hosphatase", " cis proline", " lmw ptp-like fold", " rna polymerase ii ctd", "", "Hydrolase"], "authors": ["J.w.werner-allen", "P.zhou"], "pmid": "21159777", "doi": "10.1074/JBC.M110.197129"}
|
||||||
|
{"classification": "Recombination/dna", "pdb": "6OEO", "deposition_date": "2019-03-27", "title": "Cryo-em structure of mouse rag1/2 nfc complex (dna1)", "Keywords": ["(d)j recombination", " dna transposition", " rag", " scid", " recombination", "", "Recombination-dna complex"], "authors": ["X.chen", "Y.cui", "Z.h.zhou", "W.yang", "M.gellert"], "pmid": "32015552", "doi": "10.1038/S41594-019-0363-2"}
|
||||||
|
{"classification": "Hydrolase", "pdb": "4ECA", "deposition_date": "1997-02-21", "title": "Asparaginase from e. coli, mutant t89v with covalently bound aspartate", "Keywords": ["Ydrolase", " acyl-enzyme intermediate", " threonine amidohydrolase"], "authors": ["G.j.palm", "J.lubkowski", "A.wlodawer"], "pmid": "8706862", "doi": "10.1016/0014-5793(96)00660-6"}
|
||||||
|
{"classification": "Transcription/protein binding", "pdb": "3UVX", "deposition_date": "2011-11-30", "title": "Crystal structure of the first bromodomain of human brd4 in complex With a diacetylated histone 4 peptide (h4k12ack16ac)", "Keywords": ["Romodomain", " bromodomain containing protein 4", " cap", " hunk1", " mcap", "", "Mitotic chromosome associated protein", "Peptide complex", "Structural", "Genomics consortium", "Sgc", "Transcription-protein binding complex"], "authors": ["P.filippakopoulos", "S.picaud", "T.keates", "E.ugochukwu", "F.von delft", "", "C.h.arrowsmith", "A.m.edwards", "J.weigelt", "C.bountra", "S.knapp", "Structural", "Genomics consortium (sgc)"], "pmid": "22464331", "doi": "10.1016/J.CELL.2012.02.013"}
{"classification": "Membrane protein", "pdb": "1TLZ", "deposition_date": "2004-06-10", "title": "Tsx structure complexed with uridine", "Keywords": ["Ucleoside transporter", " beta barrel", " uridine", " membrane", "Protein"], "authors": ["J.ye", "B.van den berg"], "pmid": "15272310", "doi": "10.1038/SJ.EMBOJ.7600330"}
{"classification": "Dna binding protein", "pdb": "7AZD", "deposition_date": "2020-11-16", "title": "Dna polymerase sliding clamp from escherichia coli with peptide 20 Bound", "Keywords": ["Ntibacterial drug", " dna binding protein"], "authors": ["C.monsarrat", "G.compain", "C.andre", "I.martiel", "S.engilberge", "V.olieric", "", "P.wolff", "K.brillet", "M.landolfo", "C.silva da veiga", "J.wagner", "G.guichard", "", "D.y.burnouf"], "pmid": "34806883", "doi": "10.1021/ACS.JMEDCHEM.1C00918"}
{"classification": "Transferase", "pdb": "5N3K", "deposition_date": "2017-02-08", "title": "Camp-dependent protein kinase a from cricetulus griseus in complex With fragment like molecule o-guanidino-l-homoserine", "Keywords": ["Ragment", " complex", " transferase", " serine threonine kinase", " camp", "", "Kinase", "Pka"], "authors": ["C.siefker", "A.heine", "G.klebe"]}
{"classification": "Biosynthetic protein", "pdb": "8H52", "deposition_date": "2022-10-11", "title": "Crystal structure of helicobacter pylori carboxyspermidine Dehydrogenase in complex with nadp", "Keywords": ["Arboxyspermidine dehydrogenase", " biosynthetic protein"], "authors": ["K.y.ko", "S.c.park", "S.y.cho", "S.i.yoon"], "pmid": "36283333", "doi": "10.1016/J.BBRC.2022.10.049"}
{"classification": "Metal binding protein", "pdb": "6DYC", "deposition_date": "2018-07-01", "title": "Co(ii)-bound structure of the engineered cyt cb562 variant, ch3", "Keywords": ["Esigned protein", " 4-helix bundle", " electron transport", " metal binding", "Protein"], "authors": ["F.a.tezcan", "J.rittle"], "pmid": "30778140", "doi": "10.1038/S41557-019-0218-9"}
{"classification": "Protein fibril", "pdb": "6A6B", "deposition_date": "2018-06-27", "title": "Cryo-em structure of alpha-synuclein fiber", "Keywords": ["Lpha-syn fiber", " parkinson disease", " protein fibril"], "authors": ["Y.w.li", "C.y.zhao", "F.luo", "Z.liu", "X.gui", "Z.luo", "X.zhang", "D.li", "C.liu", "X.li"], "pmid": "30065316", "doi": "10.1038/S41422-018-0075-X"}
{"classification": "Dna", "pdb": "7D5E", "deposition_date": "2020-09-25", "title": "Left-handed g-quadruplex containing two bulges", "Keywords": ["-quadruplex", " bulge", " dna", " left-handed"], "authors": ["P.das", "A.maity", "K.h.ngo", "F.r.winnerdy", "B.bakalar", "Y.mechulam", "E.schmitt", "", "A.t.phan"], "pmid": "33503265", "doi": "10.1093/NAR/GKAA1259"}
{"classification": "Transferase", "pdb": "3RSY", "deposition_date": "2011-05-02", "title": "Cellobiose phosphorylase from cellulomonas uda in complex with sulfate And glycerol", "Keywords": ["H94", " alpha barrel", " cellobiose phosphorylase", " disaccharide", "Phosphorylase", "Transferase"], "authors": ["A.van hoorebeke", "J.stout", "W.soetaert", "J.van beeumen", "T.desmet", "S.savvides"]}
{"classification": "Oxidoreductase", "pdb": "7MCI", "deposition_date": "2021-04-02", "title": "Mofe protein from azotobacter vinelandii with a sulfur-replenished Cofactor", "Keywords": ["Zotobacter vinelandii", " mofe-protein", " nitrogenase", " oxidoreductase"], "authors": ["W.kang", "C.lee", "Y.hu", "M.w.ribbe"], "doi": "10.1038/S41929-022-00782-7"}
{"classification": "Dna", "pdb": "1XUW", "deposition_date": "2004-10-26", "title": "Structural rationalization of a large difference in rna affinity Despite a small difference in chemistry between two 2'-o-modified Nucleic acid analogs", "Keywords": ["Na mimetic methylcarbamate amide analog", " dna"], "authors": ["R.pattanayek", "L.sethaphong", "C.pan", "M.prhavc", "T.p.prakash", "M.manoharan", "", "M.egli"], "pmid": "15547979", "doi": "10.1021/JA044637K"}
{"classification": "Lyase", "pdb": "7C0D", "deposition_date": "2020-05-01", "title": "Crystal structure of azospirillum brasilense l-2-keto-3-deoxyarabonate Dehydratase (hydroxypyruvate-bound form)", "Keywords": ["-2-keto-3-deoxyarabonate dehydratase", " lyase"], "authors": ["Y.watanabe", "S.watanabe"], "pmid": "32697085", "doi": "10.1021/ACS.BIOCHEM.0C00515"}
{"classification": "Signaling protein", "pdb": "5LYK", "deposition_date": "2016-09-28", "title": "Crystal structure of intracellular b30.2 domain of btn3a1 bound to Citrate", "Keywords": ["30.2", " butyrophilin", " signaling protein"], "authors": ["F.mohammed", "A.t.baker", "M.salim", "B.e.willcox"], "pmid": "28862425", "doi": "10.1021/ACSCHEMBIO.7B00694"}
{"classification": "Toxin", "pdb": "4IZL", "deposition_date": "2013-01-30", "title": "Structure of the n248a mutant of the panton-valentine leucocidin s Component from staphylococcus aureus", "Keywords": ["I-component leucotoxin", " staphylococcus aureus", " s component", "Leucocidin", "Beta-barrel pore forming toxin", "Toxin"], "authors": ["L.maveyraud", "B.j.laventie", "G.prevost", "L.mourey"], "pmid": "24643034", "doi": "10.1371/JOURNAL.PONE.0092094"}
{"classification": "Dna", "pdb": "6F3C", "deposition_date": "2017-11-28", "title": "The cytotoxic [pt(h2bapbpy)] platinum complex interacting with the Cgtacg hexamer", "Keywords": ["Rug-dna complex", " four-way junction", " dna"], "authors": ["M.ferraroni", "C.bazzicalupi", "P.gratteri", "F.papi"], "pmid": "31046177", "doi": "10.1002/ANIE.201814532"}
{"classification": "Signaling protein/inhibitor", "pdb": "4L5M", "deposition_date": "2013-06-11", "title": "Complexe of arno sec7 domain with the protein-protein interaction Inhibitor n-(4-hydroxy-2,6-dimethylphenyl)benzenesulfonamide at ph6.5", "Keywords": ["Ec-7domain", " signaling protein-inhibitor complex"], "authors": ["F.hoh", "J.rouhana"], "pmid": "24112024", "doi": "10.1021/JM4009357"}
{"classification": "Signaling protein", "pdb": "5I6J", "deposition_date": "2016-02-16", "title": "Crystal structure of srgap2 f-barx", "Keywords": ["Rgap2", " f-bar", " fx", " signaling protein"], "authors": ["M.sporny", "J.guez-haddad", "M.n.isupov", "Y.opatowsky"], "pmid": "28333212", "doi": "10.1093/MOLBEV/MSX094"}
{"classification": "Metal binding protein", "pdb": "1Q80", "deposition_date": "2003-08-20", "title": "Solution structure and dynamics of nereis sarcoplasmic calcium binding Protein", "Keywords": ["Ll-alpha", " metal binding protein"], "authors": ["G.rabah", "R.popescu", "J.a.cox", "Y.engelborghs", "C.t.craescu"], "pmid": "15819893", "doi": "10.1111/J.1742-4658.2005.04629.X"}
{"classification": "Transferase", "pdb": "1TW1", "deposition_date": "2004-06-30", "title": "Beta-1,4-galactosyltransferase mutant met344his (m344h-gal-t1) complex With udp-galactose and magnesium", "Keywords": ["Et344his mutation; closed conformation; mn binding", " transferase"], "authors": ["B.ramakrishnan", "E.boeggeman", "P.k.qasba"], "pmid": "15449940", "doi": "10.1021/BI049007+"}
{"classification": "Rna", "pdb": "2PN4", "deposition_date": "2007-04-23", "title": "Crystal structure of hepatitis c virus ires subdomain iia", "Keywords": ["Cv", " ires", " subdoamin iia", " rna", " strontium", " hepatitis"], "authors": ["Q.zhao", "Q.han", "C.r.kissinger", "P.a.thompson"], "pmid": "18391410", "doi": "10.1107/S0907444908002011"}
@@ -1,6 +1,36 @@
- {"pid": "Q6GZX4", "dates": [{"date": "28-JUN-2011", "date_info": " integrated into UniProtKB/Swiss-Prot."}, {"date": "19-JUL-2004", "date_info": " sequence version 1."}, {"date": "12-AUG-2020", "date_info": " entry version 41."}], "title": "Putative transcription factor 001R;", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3).", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus."], "references": [{"PubMed": "15165820"}, {" DOI": "10.1016/j.virol.2004.02.019"}]}
+ {"pid": " Q6GZX4", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 43"}], "title": "Putative transcription factor 001R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
- {"pid": "Q6GZX3", "dates": [{"date": "28-JUN-2011", "date_info": " integrated into UniProtKB/Swiss-Prot."}, {"date": "19-JUL-2004", "date_info": " sequence version 1."}, {"date": "12-AUG-2020", "date_info": " entry version 42."}], "title": "Uncharacterized protein 002L;", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3).", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus."], "references": [{"PubMed": "15165820"}, {" DOI": "10.1016/j.virol.2004.02.019"}]}
+ {"pid": " Q6GZX3", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 45"}], "title": "Uncharacterized protein 002L", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
- {"pid": "Q197F8", "dates": [{"date": "16-JUN-2009", "date_info": " integrated into UniProtKB/Swiss-Prot."}, {"date": "11-JUL-2006", "date_info": " sequence version 1."}, {"date": "12-AUG-2020", "date_info": " entry version 27."}], "title": "Uncharacterized protein 002R;", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus).", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus."], "references": [{"PubMed": "16912294"}, {" DOI": "10.1128/jvi.00464-06"}]}
+ {"pid": " Q197F8", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2022-02-23", "date_info": "entry version 29"}], "title": "Uncharacterized protein 002R", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
- {"pid": "Q197F7", "dates": [{"date": "16-JUN-2009", "date_info": " integrated into UniProtKB/Swiss-Prot."}, {"date": "11-JUL-2006", "date_info": " sequence version 1."}, {"date": "12-AUG-2020", "date_info": " entry version 23."}], "title": "Uncharacterized protein 003L;", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus).", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus."], "references": [{"PubMed": "16912294"}, {" DOI": "10.1128/jvi.00464-06"}]}
+ {"pid": " Q197F7", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2020-08-12", "date_info": "entry version 23"}], "title": "Uncharacterized protein 003L", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
- {"pid": "Q6GZX2", "dates": [{"date": "28-JUN-2011", "date_info": " integrated into UniProtKB/Swiss-Prot."}, {"date": "19-JUL-2004", "date_info": " sequence version 1."}, {"date": "12-AUG-2020", "date_info": " entry version 36."}], "title": "Uncharacterized protein 3R;", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3).", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus."], "references": [{"PubMed": "15165820"}, {" DOI": "10.1016/j.virol.2004.02.019"}]}
+ {"pid": " Q6GZX2", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 37"}], "title": "Uncharacterized protein 3R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
- {"pid": "Q6GZX1", "dates": [{"date": "28-JUN-2011", "date_info": " integrated into UniProtKB/Swiss-Prot."}, {"date": "19-JUL-2004", "date_info": " sequence version 1."}, {"date": "12-AUG-2020", "date_info": " entry version 34."}], "title": "Uncharacterized protein 004R;", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3).", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus."], "references": [{"PubMed": "15165820"}, {" DOI": "10.1016/j.virol.2004.02.019"}]}
+ {"pid": " Q6GZX1", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 38"}], "title": "Uncharacterized protein 004R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q197F5", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2022-10-12", "date_info": "entry version 32"}], "title": "Uncharacterized protein 005L", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
{"pid": " Q6GZX0", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 47"}], "title": "Uncharacterized protein 005R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q91G88", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2001-12-01", "date_info": "sequence version 1"}, {"date": "2023-06-28", "date_info": "entry version 53"}], "title": "Putative KilA-N domain-containing protein 006L", "organism_species": "Invertebrate iridescent virus 6 (IIV-6) (Chilo iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Iridovirus"], "references": [{"PubMed": "17239238"}, {"DOI": "10.1186/1743-422x-4-11"}]}
{"pid": " Q6GZW9", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 34"}], "title": "Uncharacterized protein 006R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZW8", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 32"}], "title": "Uncharacterized protein 007R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q197F3", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2023-02-22", "date_info": "entry version 28"}], "title": "Uncharacterized protein 007R", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
{"pid": " Q197F2", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2022-02-23", "date_info": "entry version 22"}], "title": "Uncharacterized protein 008L", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
{"pid": " Q6GZW6", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 67"}], "title": "Putative helicase 009L", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q91G85", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2001-12-01", "date_info": "sequence version 1"}, {"date": "2023-02-22", "date_info": "entry version 38"}], "title": "Uncharacterized protein 009R", "organism_species": "Invertebrate iridescent virus 6 (IIV-6) (Chilo iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Iridovirus"], "references": [{"PubMed": "17239238"}, {"DOI": "10.1186/1743-422x-4-11"}]}
{"pid": " Q6GZW5", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 37"}], "title": "Uncharacterized protein 010R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q197E9", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2023-02-22", "date_info": "entry version 28"}], "title": "Uncharacterized protein 011L", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
{"pid": " Q6GZW4", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 37"}], "title": "Uncharacterized protein 011R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZW3", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 35"}], "title": "Uncharacterized protein 012L", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q197E7", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2023-02-22", "date_info": "entry version 37"}], "title": "Uncharacterized protein IIV3-013L", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
{"pid": " Q6GZW2", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 30"}], "title": "Uncharacterized protein 013R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZW1", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 35"}], "title": "Uncharacterized protein 014R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZW0", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 50"}], "title": "Uncharacterized protein 015R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZV8", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 35"}], "title": "Uncharacterized protein 017L", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZV7", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 33"}], "title": "Uncharacterized protein 018L", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZV6", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 87"}], "title": "Putative serine/threonine-protein kinase 019R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZV5", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 40"}], "title": "Uncharacterized protein 020R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZV4", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 35"}], "title": "Uncharacterized protein 021L", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q197D8", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2022-12-14", "date_info": "entry version 35"}], "title": "Transmembrane protein 022L", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
{"pid": " Q6GZV2", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 33"}], "title": "Uncharacterized protein 023R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q197D7", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2023-02-22", "date_info": "entry version 25"}], "title": "Uncharacterized protein 023R", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
{"pid": " Q6GZV1", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 37"}], "title": "Uncharacterized protein 024R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q197D5", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2006-07-11", "date_info": "sequence version 1"}, {"date": "2022-10-12", "date_info": "entry version 24"}], "title": "Uncharacterized protein 025R", "organism_species": "Invertebrate iridescent virus 3 (IIV-3) (Mosquito iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Chloriridovirus"], "references": [{"PubMed": "16912294"}, {"DOI": "10.1128/jvi.00464-06"}]}
{"pid": " Q91G70", "dates": [{"date": "2009-06-16", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2001-12-01", "date_info": "sequence version 1"}, {"date": "2020-08-12", "date_info": "entry version 32"}], "title": "Uncharacterized protein 026R", "organism_species": "Invertebrate iridescent virus 6 (IIV-6) (Chilo iridescent virus)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Betairidovirinae", "Iridovirus"], "references": [{"PubMed": "17239238"}, {"DOI": "10.1186/1743-422x-4-11"}]}
{"pid": " Q6GZU9", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 49"}], "title": "Uncharacterized protein 027R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
{"pid": " Q6GZU8", "dates": [{"date": "2011-06-28", "date_info": "integrated into UniProtKB/Swiss-Prot"}, {"date": "2004-07-19", "date_info": "sequence version 1"}, {"date": "2023-09-13", "date_info": "entry version 55"}], "title": "Uncharacterized protein 028R", "organism_species": "Frog virus 3 (isolate Goorha) (FV-3)", "subjects": ["Viruses", "Varidnaviria", "Bamfordvirae", "Nucleocytoviricota", "Megaviricetes", "Pimascovirales", "Iridoviridae", "Alphairidovirinae", "Ranavirus"], "references": [{"PubMed": "15165820"}, {"DOI": "10.1016/j.virol.2004.02.019"}]}
@@ -26,7 +26,7 @@ class MAGMappingTest {
@Test
def mappingMagType(): Unit = {

- checkResult[Publication](MagUtility.createResultFromType(null, null), invisible = false, "Other literature type")
+ checkResult[Publication](MagUtility.createResultFromType(null, null), invisible = true, "Other literature type")
checkResult[Publication](
MagUtility.createResultFromType(Some("BookChapter"), null),
invisible = false,
@@ -70,9 +70,8 @@ public class PrepareRelatedProjectsJob {

final Dataset<Relation> rels = ClusterUtils
.loadRelations(graphPath, spark)
- .filter((FilterFunction<Relation>) r -> r.getDataInfo().getDeletedbyinference())
- .filter((FilterFunction<Relation>) r -> r.getRelType().equals(ModelConstants.RESULT_PROJECT))
- .filter((FilterFunction<Relation>) r -> !r.getRelClass().equals(BrokerConstants.IS_MERGED_IN_CLASS))
+ .filter((FilterFunction<Relation>) r -> ModelConstants.RESULT_PROJECT.equals(r.getRelType()))
+ .filter((FilterFunction<Relation>) r -> !BrokerConstants.IS_MERGED_IN_CLASS.equals(r.getRelClass()))
.filter((FilterFunction<Relation>) r -> !ClusterUtils.isDedupRoot(r.getSource()))
.filter((FilterFunction<Relation>) r -> !ClusterUtils.isDedupRoot(r.getTarget()));

@@ -53,7 +53,7 @@ public class EnrichMoreSubject extends UpdateMatcher<OaBrokerTypedValue> {
.collect(Collectors.toSet());

return source
- .getPids()
+ .getSubjects()
.stream()
.filter(s -> !existingSubjects.contains(subjectAsString(s)))
.collect(Collectors.toList());
@@ -0,0 +1,60 @@
package eu.dnetlib.dhp.broker.oa.matchers.simple;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.util.Arrays;
import java.util.List;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import eu.dnetlib.broker.objects.OaBrokerMainEntity;
import eu.dnetlib.broker.objects.OaBrokerTypedValue;

public class EnrichMoreSubjectTest {

    final EnrichMoreSubject matcher = new EnrichMoreSubject();

    @BeforeEach
    void setUp() throws Exception {
    }

    @Test
    void testFindDifferences_1() {
        final OaBrokerMainEntity source = new OaBrokerMainEntity();
        final OaBrokerMainEntity target = new OaBrokerMainEntity();
        final List<OaBrokerTypedValue> list = this.matcher.findDifferences(source, target);
        assertTrue(list.isEmpty());
    }

    @Test
    void testFindDifferences_2() {
        final OaBrokerMainEntity source = new OaBrokerMainEntity();
        final OaBrokerMainEntity target = new OaBrokerMainEntity();
        source.setSubjects(Arrays.asList(new OaBrokerTypedValue("arxiv", "subject_01")));
        final List<OaBrokerTypedValue> list = this.matcher.findDifferences(source, target);
        assertEquals(1, list.size());
    }

    @Test
    void testFindDifferences_3() {
        final OaBrokerMainEntity source = new OaBrokerMainEntity();
        final OaBrokerMainEntity target = new OaBrokerMainEntity();
        target.setSubjects(Arrays.asList(new OaBrokerTypedValue("arxiv", "subject_01")));
        final List<OaBrokerTypedValue> list = this.matcher.findDifferences(source, target);
        assertTrue(list.isEmpty());
    }

    @Test
    void testFindDifferences_4() {
        final OaBrokerMainEntity source = new OaBrokerMainEntity();
        final OaBrokerMainEntity target = new OaBrokerMainEntity();
        source.setSubjects(Arrays.asList(new OaBrokerTypedValue("arxiv", "subject_01")));
        target.setSubjects(Arrays.asList(new OaBrokerTypedValue("arxiv", "subject_01")));
        final List<OaBrokerTypedValue> list = this.matcher.findDifferences(source, target);
        assertTrue(list.isEmpty());
    }

}
@@ -17,45 +17,6 @@ import eu.dnetlib.pace.tree.support.TreeStats;

class DecisionTreeTest {

- @Test
- void testJPath() throws IOException {
-
- DedupConfig conf = DedupConfig
- .load(IOUtils.toString(getClass().getResourceAsStream("dedup_conf_organization.json")));
-
- final String org = IOUtils.toString(getClass().getResourceAsStream("organization.json"));
-
- Row row = SparkModel.apply(conf).rowFromJson(org);
-
- System.out.println("row = " + row);
- Assertions.assertNotNull(row);
- Assertions.assertTrue(StringUtils.isNotBlank(row.getAs("identifier")));
-
- System.out.println("row = " + row.getAs("countrytitle"));
- }
-
- @Test
- void jsonToModelTest() throws IOException {
- DedupConfig conf = DedupConfig
- .load(
- IOUtils
- .toString(
- SparkOpenorgsDedupTest.class
- .getResourceAsStream(
- "/eu/dnetlib/dhp/dedup/conf/org.curr.conf.json")));
-
- final String org = IOUtils.toString(getClass().getResourceAsStream("organization_example1.json"));
-
- Row row = SparkModel.apply(conf).rowFromJson(org);
- // to check that the same parsing returns the same row
- Row row1 = SparkModel.apply(conf).rowFromJson(org);
-
- Assertions.assertEquals(row, row1);
- System.out.println("row = " + row);
- Assertions.assertNotNull(row);
- Assertions.assertTrue(StringUtils.isNotBlank(row.getAs("identifier")));
- }
-
@Test
void organizationDecisionTreeTest() throws Exception {
DedupConfig conf = DedupConfig
@@ -452,18 +452,18 @@ public class SparkDedupTest implements Serializable {
assertEquals(ModelConstants.RESULT_RESULT, r.getRelType());
assertEquals(ModelConstants.DEDUP, r.getSubRelType());
assertEquals(ModelConstants.IS_MERGED_IN, r.getRelClass());
- assertTrue(dups.contains(r.getTarget()));
+ assertFalse(dups.contains(r.getTarget()));
});

final List<Relation> mergedIn = pubs
.filter("target == '50|arXiv_dedup_::c93aeb433eb90ed7a86e29be00791b7c'")
.collectAsList();
- assertEquals(3, mergedIn.size());
+ assertEquals(1, mergedIn.size());
mergedIn.forEach(r -> {
assertEquals(ModelConstants.RESULT_RESULT, r.getRelType());
assertEquals(ModelConstants.DEDUP, r.getSubRelType());
- assertEquals(ModelConstants.IS_MERGED_IN, r.getRelClass());
- assertTrue(dups.contains(r.getSource()));
+ assertEquals(ModelConstants.MERGES, r.getRelClass());
+ assertFalse(dups.contains(r.getSource()));
});

System.out.println("orgs_mergerel = " + orgs_mergerel);
@@ -473,8 +473,8 @@ public class SparkDedupTest implements Serializable {
System.out.println("orp_mergerel = " + orp_mergerel);

if (CHECK_CARDINALITIES) {
- assertEquals(1268, orgs_mergerel);
- assertEquals(1156, pubs.count());
+ assertEquals(1278, orgs_mergerel);
+ assertEquals(1158, pubs.count());
assertEquals(292, sw_mergerel);
assertEquals(476, ds_mergerel);
assertEquals(742, orp_mergerel);
@@ -241,7 +241,6 @@ public class SparkPublicationRootsTest implements Serializable {

verifyRoot_case_1(roots, pubs);
verifyRoot_case_2(roots, pubs);
- verifyRoot_case_3(roots, pubs);
}

private static void verifyRoot_case_1(Dataset<Publication> roots, Dataset<Publication> pubs) {
@@ -322,34 +321,6 @@ public class SparkPublicationRootsTest implements Serializable {
assertTrue(Sets.difference(root_cf, dups_cf).isEmpty());
}

- private void verifyRoot_case_3(Dataset<Publication> roots, Dataset<Publication> pubs) {
- Publication root = roots
- .filter("id = '50|dedup_wf_001::31ca734cc22181b704c4aa8fd050062a'")
- .first();
- assertNotNull(root);
-
- Publication pivot_duplicate = pubs
- .filter("id = '50|od_______166::31ca734cc22181b704c4aa8fd050062a'")
- .first();
-
- assertEquals(pivot_duplicate.getPublisher().getValue(), root.getPublisher().getValue());
-
- Set<String> dups_cf = pubs
- .collectAsList()
- .stream()
- .flatMap(p -> p.getCollectedfrom().stream())
- .map(KeyValue::getValue)
- .collect(Collectors.toCollection(HashSet::new));
-
- Set<String> root_cf = root
- .getCollectedfrom()
- .stream()
- .map(KeyValue::getValue)
- .collect(Collectors.toCollection(HashSet::new));
-
- assertTrue(Sets.difference(root_cf, dups_cf).isEmpty());
- }
-
@Test
@Order(6)
void updateEntityTest() throws Exception {
@@ -143,7 +143,9 @@ public class SparkPublicationRootsTest2 implements Serializable {
"--graphBasePath", graphInputPath,
"--actionSetId", testActionSetId,
"--isLookUpUrl", "lookupurl",
- "--workingPath", workingPath
+ "--workingPath", workingPath,
+ "--hiveMetastoreUris", "none",
+ "--pivotHistoryDatabase", ""
}), spark)
.run(isLookUpService);

@@ -153,7 +155,7 @@ public class SparkPublicationRootsTest2 implements Serializable {
.as(Encoders.bean(Relation.class));

assertEquals(
- 3, merges
+ 4, merges
.filter("relclass == 'isMergedIn'")
.map((MapFunction<Relation, String>) Relation::getTarget, Encoders.STRING())
.distinct()
@@ -178,7 +180,7 @@ public class SparkPublicationRootsTest2 implements Serializable {
.textFile(workingPath + "/" + testActionSetId + "/publication_deduprecord")
.map(asEntity(Publication.class), Encoders.bean(Publication.class));

- assertEquals(3, roots.count());
+ assertEquals(4, roots.count());

final Dataset<Publication> pubs = spark
.read()
@@ -195,7 +197,7 @@ public class SparkPublicationRootsTest2 implements Serializable {
.collectAsList()
.get(0);

- assertEquals(crossref_duplicate.getDateofacceptance().getValue(), root.getDateofacceptance().getValue());
+ assertEquals("2022-01-01", root.getDateofacceptance().getValue());
assertEquals(crossref_duplicate.getJournal().getName(), root.getJournal().getName());
assertEquals(crossref_duplicate.getJournal().getIssnPrinted(), root.getJournal().getIssnPrinted());
assertEquals(crossref_duplicate.getPublisher().getValue(), root.getPublisher().getValue());
@@ -168,7 +168,7 @@ public class SparkStatsTest implements Serializable {
.load(testOutputBasePath + "/" + testActionSetId + "/otherresearchproduct_blockstats")
.count();

- assertEquals(414, orgs_blocks);
+ assertEquals(412, orgs_blocks);
assertEquals(221, pubs_blocks);
assertEquals(134, sw_blocks);
assertEquals(196, ds_blocks);
@@ -73,12 +73,6 @@
"name": "Irish Nephrology Society",
"synonym": []
},
- {
- "id": "100011062",
- "uri": "http://dx.doi.org/10.13039/100011062",
- "name": "Asian Spinal Cord Network",
- "synonym": []
- },
{
"id": "100011096",
"uri": "http://dx.doi.org/10.13039/100011096",
@@ -223,12 +217,6 @@
"name": "Global Brain Health Institute",
"synonym": []
},
- {
- "id": "100015776",
- "uri": "http://dx.doi.org/10.13039/100015776",
- "name": "Health and Social Care Board",
- "synonym": []
- },
{
"id": "100015992",
"uri": "http://dx.doi.org/10.13039/100015992",
@@ -403,18 +391,6 @@
"name": "Irish Hospice Foundation",
"synonym": []
},
- {
- "id": "501100001596",
- "uri": "http://dx.doi.org/10.13039/501100001596",
- "name": "Irish Research Council for Science, Engineering and Technology",
- "synonym": []
- },
- {
- "id": "501100001597",
- "uri": "http://dx.doi.org/10.13039/501100001597",
- "name": "Irish Research Council for the Humanities and Social Sciences",
- "synonym": []
- },
{
"id": "501100001598",
"uri": "http://dx.doi.org/10.13039/501100001598",
@@ -515,7 +491,7 @@
"id": "501100002081",
"uri": "http://dx.doi.org/10.13039/501100002081",
"name": "Irish Research Council",
- "synonym": []
+ "synonym": ["501100001596", "501100001597"]
},
{
"id": "501100002736",
@@ -560,7 +560,15 @@ case object Crossref2Oaf {
"10.13039/501100000266" | "10.13039/501100006041" | "10.13039/501100000265" | "10.13039/501100000270" |
"10.13039/501100013589" | "10.13039/501100000271" =>
generateSimpleRelationFromAward(funder, "ukri________", a => a)
+ //HFRI
+ case "10.13039/501100013209" =>
+ generateSimpleRelationFromAward(funder, "hfri________", a => a)
+ val targetId = getProjectId("hfri________", "1e5e62235d094afd01cd56e65112fc63")
+ queue += generateRelation(sourceId, targetId, ModelConstants.IS_PRODUCED_BY)
+ queue += generateRelation(targetId, sourceId, ModelConstants.PRODUCES)
+ //ERASMUS+
+ case "10.13039/501100010790" =>
+ generateSimpleRelationFromAward(funder, "erasmusplus_", a => a)
case _ => logger.debug("no match for " + funder.DOI.get)

}
@@ -13,13 +13,13 @@ public class CommunityContentprovider {
private String openaireId;
private SelectionConstraints selectioncriteria;

- private String enabled;
+ private Boolean enabled;

- public String getEnabled() {
+ public Boolean getEnabled() {
return enabled;
}

- public void setEnabled(String enabled) {
+ public void setEnabled(Boolean enabled) {
this.enabled = enabled;
}

@@ -53,6 +53,8 @@ public class Constraints implements Serializable {

for (Constraint sc : constraint) {
boolean verified = false;
+ if (!param.containsKey(sc.getField()))
+ return false;
for (String value : param.get(sc.getField())) {
if (sc.verifyCriteria(value.trim())) {
verified = true;
@@ -14,6 +14,7 @@ import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
+ import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
@@ -84,19 +85,26 @@ public class SparkCountryPropagationJob {
Dataset<R> res = readPath(spark, sourcePath, resultClazz);

log.info("Reading prepared info: {}", preparedInfoPath);
- Dataset<ResultCountrySet> prepared = spark
+ final Dataset<Row> preparedInfoRaw = spark
.read()
- .json(preparedInfoPath)
- .as(Encoders.bean(ResultCountrySet.class));
+ .json(preparedInfoPath);

- res
- .joinWith(prepared, res.col("id").equalTo(prepared.col("resultId")), "left_outer")
- .map(getCountryMergeFn(), Encoders.bean(resultClazz))
- .write()
- .option("compression", "gzip")
- .mode(SaveMode.Overwrite)
- .json(outputPath);
+ if (!preparedInfoRaw.isEmpty()) {
+ final Dataset<ResultCountrySet> prepared = preparedInfoRaw.as(Encoders.bean(ResultCountrySet.class));
+ res
+ .joinWith(prepared, res.col("id").equalTo(prepared.col("resultId")), "left_outer")
+ .map(getCountryMergeFn(), Encoders.bean(resultClazz))
+ .write()
+ .option("compression", "gzip")
+ .mode(SaveMode.Overwrite)
+ .json(outputPath);
+ } else {
+ res
+ .write()
+ .option("compression", "gzip")
+ .mode(SaveMode.Overwrite)
+ .json(outputPath);
+ }
}

private static <R extends Result> MapFunction<Tuple2<R, ResultCountrySet>, R> getCountryMergeFn() {
(File diff suppressed because it is too large)
@@ -147,6 +147,7 @@ public class CleanGraphSparkJob {
.map((MapFunction<T, T>) GraphCleaningFunctions::fixVocabularyNames, Encoders.bean(clazz))
.map((MapFunction<T, T>) value -> OafCleaner.apply(value, mapping), Encoders.bean(clazz))
.map((MapFunction<T, T>) value -> GraphCleaningFunctions.cleanup(value, vocs), Encoders.bean(clazz))
+ .map((MapFunction<T, T>) GraphCleaningFunctions::dedicatedUglyHacks, Encoders.bean(clazz))
.filter((FilterFunction<T>) GraphCleaningFunctions::filter);

// read the master-duplicate tuples
@@ -55,29 +55,7 @@ import eu.dnetlib.dhp.common.Constants;
import eu.dnetlib.dhp.common.vocabulary.VocabularyGroup;
import eu.dnetlib.dhp.schema.common.ModelConstants;
import eu.dnetlib.dhp.schema.common.ModelSupport;
- import eu.dnetlib.dhp.schema.oaf.AccessRight;
- import eu.dnetlib.dhp.schema.oaf.Author;
- import eu.dnetlib.dhp.schema.oaf.Context;
- import eu.dnetlib.dhp.schema.oaf.Country;
- import eu.dnetlib.dhp.schema.oaf.DataInfo;
- import eu.dnetlib.dhp.schema.oaf.Dataset;
- import eu.dnetlib.dhp.schema.oaf.EoscIfGuidelines;
- import eu.dnetlib.dhp.schema.oaf.Field;
- import eu.dnetlib.dhp.schema.oaf.GeoLocation;
- import eu.dnetlib.dhp.schema.oaf.Instance;
- import eu.dnetlib.dhp.schema.oaf.InstanceTypeMapping;
- import eu.dnetlib.dhp.schema.oaf.Journal;
- import eu.dnetlib.dhp.schema.oaf.KeyValue;
- import eu.dnetlib.dhp.schema.oaf.OAIProvenance;
- import eu.dnetlib.dhp.schema.oaf.Oaf;
- import eu.dnetlib.dhp.schema.oaf.OafEntity;
- import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
- import eu.dnetlib.dhp.schema.oaf.Publication;
- import eu.dnetlib.dhp.schema.oaf.Qualifier;
- import eu.dnetlib.dhp.schema.oaf.Result;
- import eu.dnetlib.dhp.schema.oaf.Software;
- import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
- import eu.dnetlib.dhp.schema.oaf.Subject;
+ import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;

@@ -667,22 +645,25 @@ public abstract class AbstractMdRecordToOafMapper {
 		return this.vocs.getTermAsQualifier(schemeId, classId);
 	}

-	protected List<StructuredProperty> prepareListStructPropsWithValidQualifier(
+	protected List<HashableStructuredProperty> prepareListStructPropsWithValidQualifier(
 		final Node node,
 		final String xpath,
 		final String xpathClassId,
 		final String schemeId,
 		final DataInfo info) {
-		final List<StructuredProperty> res = new ArrayList<>();
+		final Set<HashableStructuredProperty> res = new HashSet<>();

 		for (final Object o : node.selectNodes(xpath)) {
 			final Node n = (Node) o;
 			final String classId = n.valueOf(xpathClassId).trim();
 			if (this.vocs.termExists(schemeId, classId)) {
-				res.add(structuredProperty(n.getText(), this.vocs.getTermAsQualifier(schemeId, classId), info));
+				res
+					.add(
+						HashableStructuredProperty
+							.newInstance(n.getText(), this.vocs.getTermAsQualifier(schemeId, classId), info));
 			}
 		}
-		return res;
+		return Lists.newArrayList(res);
 	}

 	protected List<StructuredProperty> prepareListStructProps(
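Note: the change above is a de-duplication fix. Properties are accumulated in a HashSet of a hashable wrapper type and only turned back into a List on return, so repeated identifier and subject entries collapse to one. A minimal sketch of the idea with a hypothetical wrapper; the real HashableStructuredProperty may hash over different fields.

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;

// Hypothetical wrapper: equality and hashing over the fields that define a duplicate.
final class HashableProp {
	final String value;
	final String classId;

	HashableProp(String value, String classId) {
		this.value = value;
		this.classId = classId;
	}

	@Override
	public boolean equals(Object o) {
		if (!(o instanceof HashableProp)) return false;
		final HashableProp p = (HashableProp) o;
		return Objects.equals(value, p.value) && Objects.equals(classId, p.classId);
	}

	@Override
	public int hashCode() {
		return Objects.hash(value, classId);
	}

	// Collecting into a set drops repeated (value, classId) pairs before listing them.
	static List<HashableProp> dedup(List<HashableProp> in) {
		return new ArrayList<>(new HashSet<>(in));
	}
}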
@@ -25,6 +25,7 @@ import eu.dnetlib.dhp.schema.oaf.*;
 import eu.dnetlib.dhp.schema.oaf.utils.CleaningFunctions;
 import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
 import eu.dnetlib.dhp.schema.oaf.utils.ModelHardLimits;
+import eu.dnetlib.dhp.schema.oaf.utils.PidCleaner;

 public class OafToOafMapper extends AbstractMdRecordToOafMapper {

@@ -380,7 +381,7 @@ public class OafToOafMapper extends AbstractMdRecordToOafMapper {
 		return prepareListStructPropsWithValidQualifier(
 			doc, "//oaf:identifier", "@identifierType", DNET_PID_TYPES, info)
 				.stream()
-				.map(CleaningFunctions::normalizePidValue)
+				.map(PidCleaner::normalizePidValue)
 				.collect(Collectors.toList());
 	}
@@ -24,6 +24,7 @@ import eu.dnetlib.dhp.schema.common.RelationInverse;
 import eu.dnetlib.dhp.schema.oaf.*;
 import eu.dnetlib.dhp.schema.oaf.utils.CleaningFunctions;
 import eu.dnetlib.dhp.schema.oaf.utils.IdentifierFactory;
+import eu.dnetlib.dhp.schema.oaf.utils.PidCleaner;

 public class OdfToOafMapper extends AbstractMdRecordToOafMapper {

@@ -504,7 +505,7 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {

 	@Override
 	protected List<StructuredProperty> prepareResultPids(final Document doc, final DataInfo info) {
-		final Set<StructuredProperty> res = new HashSet<>();
+		final Set<HashableStructuredProperty> res = new HashSet<>();
 		res
 			.addAll(
 				prepareListStructPropsWithValidQualifier(
@@ -524,7 +525,8 @@ public class OdfToOafMapper extends AbstractMdRecordToOafMapper {

 		return res
 			.stream()
-			.map(CleaningFunctions::normalizePidValue)
+			.map(PidCleaner::normalizePidValue)
+			.filter(CleaningFunctions::pidFilter)
 			.collect(Collectors.toList());
 	}
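Note: both mappers now normalize PIDs through PidCleaner::normalizePidValue, and the ODF mapper additionally keeps only values accepted by CleaningFunctions::pidFilter. The stream shape is normalize-then-filter. A runnable sketch with placeholder helpers (these are not the library's implementations), fed with one of the deliberately malformed DOIs that appears in the new test record further down:

import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.stream.Collectors;

public class PidPipelineSketch {

	// Placeholder normalizer: lower-case and strip a DOI resolver prefix.
	static String normalize(String pid) {
		return pid.toLowerCase(Locale.ROOT).replaceFirst("^https?://(dx\\.)?doi\\.org/", "");
	}

	// Placeholder filter: keep only values that look like a DOI.
	static boolean isValid(String pid) {
		return pid.startsWith("10.");
	}

	public static void main(String[] args) {
		final List<String> pids = Arrays.asList(
			"https://doi.org/10.1016/j.envint.2014.07.004",
			"0.1163/18763308-90001038"); // malformed: dropped by the filter
		final List<String> cleaned = pids.stream()
			.map(PidPipelineSketch::normalize)
			.filter(PidPipelineSketch::isValid)
			.collect(Collectors.toList());
		System.out.println(cleaned); // [10.1016/j.envint.2014.07.004]
	}
}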
@@ -85,7 +85,7 @@
 			<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
 		</kill>

 	<fork name="fork_downloads_csv">
 		<path start="download_gold"/>
 		<path start="download_doaj_json"/>
 	</fork>
@@ -223,11 +223,13 @@
 			--executor-memory=${sparkExecutorMemory}
 			--executor-cores=${sparkExecutorCores}
 			--driver-memory=${sparkDriverMemory}
+			--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 			--conf spark.extraListeners=${spark2ExtraListeners}
 			--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 			--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 			--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 			--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+			--conf spark.sql.shuffle.partitions=15000
 		</spark-opts>
 		<arg>--hostedByMapPath</arg><arg>${hostedByMapPath}</arg>
 		<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
@@ -253,11 +255,13 @@
 			--executor-memory=${sparkExecutorMemory}
 			--executor-cores=${sparkExecutorCores}
 			--driver-memory=${sparkDriverMemory}
+			--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 			--conf spark.extraListeners=${spark2ExtraListeners}
 			--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 			--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
 			--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 			--conf spark.sql.warehouse.dir=${sparkSqlWarehouseDir}
+			--conf spark.sql.shuffle.partitions=15000
 		</spark-opts>
 		<arg>--outputPath</arg><arg>${outputPath}/publication</arg>
 		<arg>--preparedInfoPath</arg><arg>${workingDir}/preparedInfo</arg>
@@ -278,6 +282,7 @@
 			--executor-memory=${sparkExecutorMemory}
 			--executor-cores=${sparkExecutorCores}
 			--driver-memory=${sparkDriverMemory}
+			--conf spark.executor.memoryOverhead=${sparkExecutorMemory}
 			--conf spark.extraListeners=${spark2ExtraListeners}
 			--conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
 			--conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
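Note: the recurring addition in these workflow hunks gives every executor off-heap headroom (spark.executor.memoryOverhead, here reusing the executor memory value) and pins spark.sql.shuffle.partitions to 15000 instead of Spark's default of 200, spreading the wide joins over many small tasks. A sketch of the same two settings applied programmatically, with illustrative values:

import org.apache.spark.SparkConf;
import org.apache.spark.sql.SparkSession;

public class SparkConfSketch {
	public static void main(String[] args) {
		final SparkConf conf = new SparkConf()
			.setAppName("hosted-by-map-sketch")
			.setMaster("local[*]")
			// off-heap headroom per executor, on top of the JVM heap
			.set("spark.executor.memoryOverhead", "4g")
			// many small shuffle partitions keep graph-sized joins from skewing onto few tasks
			.set("spark.sql.shuffle.partitions", "15000");

		final SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
		System.out.println(spark.conf().get("spark.sql.shuffle.partitions")); // 15000
		spark.stop();
	}
}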
@@ -388,7 +388,7 @@ public class CleanGraphSparkJobTest {
 			.collect(Collectors.toList());

 		assertNotNull(fos_subjects);
-		assertEquals(2, fos_subjects.size());
+		assertEquals(3, fos_subjects.size());

 		assertTrue(
 			fos_subjects
@@ -396,18 +396,10 @@ public class CleanGraphSparkJobTest {
 				.anyMatch(
 					s -> "0101 mathematics".equals(s.getValue()) &
 						ModelConstants.DNET_SUBJECT_FOS_CLASSID.equals(s.getQualifier().getClassid()) &
-						"sysimport:crosswalk:datasetarchive"
-							.equals(s.getDataInfo().getProvenanceaction().getClassid())));
+						"subject:fos".equals(s.getDataInfo().getProvenanceaction().getClassid())));

-		assertTrue(
-			fos_subjects
-				.stream()
-				.anyMatch(
-					s -> "0102 computer and information sciences".equals(s.getValue()) &
-						ModelConstants.DNET_SUBJECT_FOS_CLASSID.equals(s.getQualifier().getClassid())));
-
-		verify_keyword(p, "In Situ Hybridization");
-		verify_keyword(p, "Avicennia");
+		verify_keyword(p, "FOS: Mathematics");
+		verify_keyword(p, "FOS: Computer and information sciences");
 	}

 	@Test
@@ -266,7 +266,7 @@ public class GraphCleaningFunctionsTest {
 			.collect(Collectors.toList());

 		assertNotNull(fos_subjects);
-		assertEquals(2, fos_subjects.size());
+		assertEquals(3, fos_subjects.size());

 		assertTrue(
 			fos_subjects
@@ -274,18 +274,18 @@ public class GraphCleaningFunctionsTest {
 				.anyMatch(
 					s -> "0101 mathematics".equals(s.getValue()) &
 						ModelConstants.DNET_SUBJECT_FOS_CLASSID.equals(s.getQualifier().getClassid()) &
-						"sysimport:crosswalk:datasetarchive"
-							.equals(s.getDataInfo().getProvenanceaction().getClassid())));
+						"subject:fos".equals(s.getDataInfo().getProvenanceaction().getClassid())));

 		assertTrue(
 			fos_subjects
 				.stream()
 				.anyMatch(
 					s -> "0102 computer and information sciences".equals(s.getValue()) &
-						ModelConstants.DNET_SUBJECT_FOS_CLASSID.equals(s.getQualifier().getClassid())));
+						ModelConstants.DNET_SUBJECT_FOS_CLASSID.equals(s.getQualifier().getClassid()) &
+						"subject:fos".equals(s.getDataInfo().getProvenanceaction().getClassid())));

-		verify_keyword(p_cleaned, "In Situ Hybridization");
-		verify_keyword(p_cleaned, "Avicennia");
+		verify_keyword(p_cleaned, "FOS: Computer and information sciences");
+		verify_keyword(p_cleaned, "FOS: Mathematics");

 		// TODO add more assertions to verity the cleaned values
 		System.out.println(MAPPER.writeValueAsString(p_cleaned));
@@ -44,7 +44,7 @@ class GenerateEntitiesApplicationTest {
 	}

 	@Test
-	void testMergeResult() throws IOException, DocumentException {
+	void testMergeResult() throws IOException {
 		Result publication = getResult("oaf_record.xml", Publication.class);
 		Result dataset = getResult("odf_dataset.xml", Dataset.class);
 		Result software = getResult("odf_software.xml", Software.class);
@@ -69,15 +69,15 @@ class GenerateEntitiesApplicationTest {
 		verifyMerge(orp, software, Software.class, ModelConstants.SOFTWARE_RESULTTYPE_CLASSID);
 	}

-	protected <T extends Result> void verifyMerge(Result publication, Result dataset, Class<T> clazz,
+	protected <T extends Result> void verifyMerge(Result r1, Result r2, Class<T> clazz,
 		String resultType) {
-		final Result merge = (Result) MergeUtils.merge(publication, dataset);
+		final Result merge = MergeUtils.checkedMerge(r1, r2, true);
 		assertTrue(clazz.isAssignableFrom(merge.getClass()));
 		assertEquals(resultType, merge.getResulttype().getClassid());
 	}

 	protected <T extends Result> Result getResult(String xmlFileName, Class<T> clazz)
-		throws IOException, DocumentException {
+		throws IOException {
 		final String xml = IOUtils.toString(getClass().getResourceAsStream(xmlFileName));
 		return new OdfToOafMapper(vocs, false, true)
 			.processMdRecord(xml)
@@ -216,7 +216,7 @@ class MappersTest {
 	}

 	@Test
-	void testPublication_PubMed() throws IOException, DocumentException {
+	void testPublication_PubMed() throws IOException {

 		final String xml = IOUtils
 			.toString(Objects.requireNonNull(getClass().getResourceAsStream("oaf_record_pubmed.xml")));
@@ -264,8 +264,17 @@ class MappersTest {

 		assertFalse(p.getSubject().isEmpty());
 		assertFalse(p.getPid().isEmpty());
-		assertEquals("PMC1517292", p.getPid().get(0).getValue());
-		assertEquals("pmc", p.getPid().get(0).getQualifier().getClassid());
+		assertTrue(p.getPid().stream().anyMatch(pi -> "pmc".equals(pi.getQualifier().getClassid())));
+		assertEquals(
+			"PMC1517292",
+			p
+				.getPid()
+				.stream()
+				.filter(pi -> "pmc".equals(pi.getQualifier().getClassid()))
+				.findFirst()
+				.get()
+				.getValue());

 		assertNotNull(p.getInstance());
 		assertFalse(p.getInstance().isEmpty());
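Note: the rewritten assertions stop indexing into p.getPid().get(0). Once PIDs flow through a set (see the HashableStructuredProperty change above), their order is no longer stable, so the test selects by pid type instead of by position. The same idiom as a standalone fragment; the names mirror the diff, and orElseThrow is a stylistic variation on the diff's findFirst().get():

// Order-independent pid assertions: select by qualifier classid, not by list position.
final List<StructuredProperty> pids = p.getPid();
assertTrue(pids.stream().anyMatch(pi -> "pmc".equals(pi.getQualifier().getClassid())));

final String pmc = pids.stream()
	.filter(pi -> "pmc".equals(pi.getQualifier().getClassid()))
	.map(StructuredProperty::getValue)
	.findFirst()
	.orElseThrow(AssertionError::new);
assertEquals("PMC1517292", pmc);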
@@ -292,7 +301,7 @@ class MappersTest {
 	}

 	@Test
-	void testPublicationInvisible() throws IOException, DocumentException {
+	void testPublicationInvisible() throws IOException {

 		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("oaf_record.xml")));

@@ -307,6 +316,25 @@ class MappersTest {

 	}

+	@Test
+	void testPublicationInvisible_BASE() throws IOException {
+
+		final String xml = IOUtils
+			.toString(Objects.requireNonNull(getClass().getResourceAsStream("odf_record_base.xml")));
+
+		final List<Oaf> list = new OdfToOafMapper(vocs, true, true).processMdRecord(xml);
+
+		assertFalse(list.isEmpty());
+		assertTrue(list.get(0) instanceof Publication);
+
+		final Publication p = (Publication) list.get(0);
+
+		assertTrue(p.getDataInfo().getInvisible());
+
+		System.out.println(new ObjectMapper().writeValueAsString(p));
+
+	}
+
 	@Test
 	void testOdfFwfEBookLibrary() throws IOException {
 		final String xml = IOUtils
@@ -318,7 +346,7 @@ class MappersTest {
 	}

 	@Test
-	void testDataset() throws IOException, DocumentException {
+	void testDataset() throws IOException {
 		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("odf_dataset.xml")));

 		final List<Oaf> list = new OdfToOafMapper(vocs, false, true).processMdRecord(xml);
@@ -332,19 +360,19 @@ class MappersTest {
 		final Relation r1 = (Relation) list.get(1);
 		final Relation r2 = (Relation) list.get(2);

-		assertEquals(d.getId(), r1.getSource());
-		assertEquals("40|corda_______::e06332dee33bec6c2ba4c98601053229", r1.getTarget());
+		assertEquals(d.getId(), r1.getTarget());
+		assertEquals("40|corda_______::e06332dee33bec6c2ba4c98601053229", r1.getSource());
 		assertEquals(ModelConstants.RESULT_PROJECT, r1.getRelType());
 		assertEquals(ModelConstants.OUTCOME, r1.getSubRelType());
-		assertEquals(ModelConstants.IS_PRODUCED_BY, r1.getRelClass());
+		assertEquals(ModelConstants.PRODUCES, r1.getRelClass());
 		assertTrue(r1.getValidated());
 		assertEquals("2020-01-01", r1.getValidationDate());

-		assertEquals(d.getId(), r2.getTarget());
-		assertEquals("40|corda_______::e06332dee33bec6c2ba4c98601053229", r2.getSource());
+		assertEquals(d.getId(), r2.getSource());
+		assertEquals("40|corda_______::e06332dee33bec6c2ba4c98601053229", r2.getTarget());
 		assertEquals(ModelConstants.RESULT_PROJECT, r2.getRelType());
 		assertEquals(ModelConstants.OUTCOME, r2.getSubRelType());
-		assertEquals(ModelConstants.PRODUCES, r2.getRelClass());
+		assertEquals(ModelConstants.IS_PRODUCED_BY, r2.getRelClass());
 		assertTrue(r2.getValidated());
 		assertEquals("2020-01-01", r2.getValidationDate());
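Note: this hunk flips the direction convention for the emitted relation pair: the first relation now runs from the project to the result (relClass produces) and the second is its exact inverse (isProducedBy). A sketch of building such an inverse pair with plain bean setters, assuming the standard getters/setters that the assertions above imply exist on Relation:

// A relation and its inverse share relType/subRelType and swap the endpoints.
final Relation direct = new Relation();
direct.setSource("40|corda_______::e06332dee33bec6c2ba4c98601053229"); // the project
direct.setTarget(d.getId());                                           // the result
direct.setRelType(ModelConstants.RESULT_PROJECT);
direct.setSubRelType(ModelConstants.OUTCOME);
direct.setRelClass(ModelConstants.PRODUCES);

final Relation inverse = new Relation();
inverse.setSource(direct.getTarget());
inverse.setTarget(direct.getSource());
inverse.setRelType(direct.getRelType());
inverse.setSubRelType(direct.getSubRelType());
inverse.setRelClass(ModelConstants.IS_PRODUCED_BY); // inverse of PRODUCES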
@@ -450,7 +478,7 @@ class MappersTest {
 	}

 	@Test
-	void testOdfBielefeld() throws IOException, DocumentException {
+	void testOdfBielefeld() throws IOException {
 		final String xml = IOUtils
 			.toString(Objects.requireNonNull(getClass().getResourceAsStream("odf_bielefeld.xml")));

@@ -501,7 +529,7 @@ class MappersTest {
 	}

 	@Test
-	void testOpentrial() throws IOException, DocumentException {
+	void testOpentrial() throws IOException {
 		final String xml = IOUtils
 			.toString(Objects.requireNonNull(getClass().getResourceAsStream("odf_opentrial.xml")));

@@ -741,7 +769,7 @@ class MappersTest {
 	}

 	@Test
-	void testSoftware() throws IOException, DocumentException {
+	void testSoftware() throws IOException {
 		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("odf_software.xml")));

 		final List<Oaf> list = new OdfToOafMapper(vocs, false, true).processMdRecord(xml);
@@ -763,22 +791,21 @@ class MappersTest {
 		final Relation r1 = (Relation) list.get(1);
 		final Relation r2 = (Relation) list.get(2);

-		assertEquals(s.getId(), r1.getSource());
-		assertEquals("50|doi_________::b453e7b4b2130ace57ff0c3db470a982", r1.getTarget());
+		assertEquals(s.getId(), r1.getTarget());
+		assertEquals("50|doi_________::b453e7b4b2130ace57ff0c3db470a982", r1.getSource());
 		assertEquals(ModelConstants.RESULT_RESULT, r1.getRelType());
 		assertEquals(ModelConstants.RELATIONSHIP, r1.getSubRelType());
-		assertEquals(ModelConstants.IS_REFERENCED_BY, r1.getRelClass());
+		assertEquals(ModelConstants.REFERENCES, r1.getRelClass());

-		assertEquals(s.getId(), r2.getTarget());
-		assertEquals("50|doi_________::b453e7b4b2130ace57ff0c3db470a982", r2.getSource());
+		assertEquals(s.getId(), r2.getSource());
+		assertEquals("50|doi_________::b453e7b4b2130ace57ff0c3db470a982", r2.getTarget());
 		assertEquals(ModelConstants.RESULT_RESULT, r2.getRelType());
 		assertEquals(ModelConstants.RELATIONSHIP, r2.getSubRelType());
-		assertEquals(ModelConstants.REFERENCES, r2.getRelClass());
-
+		assertEquals(ModelConstants.IS_REFERENCED_BY, r2.getRelClass());
 	}

 	@Test
-	void testClaimDedup() throws IOException, DocumentException {
+	void testClaimDedup() throws IOException {
 		final String xml = IOUtils
 			.toString(Objects.requireNonNull(getClass().getResourceAsStream("oaf_claim_dedup.xml")));
 		final List<Oaf> list = new OafToOafMapper(vocs, false, true).processMdRecord(xml);
@@ -792,7 +819,7 @@ class MappersTest {
 	}

 	@Test
-	void testNakala() throws IOException, DocumentException {
+	void testNakala() throws IOException {
 		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("odf_nakala.xml")));
 		final List<Oaf> list = new OdfToOafMapper(vocs, false, true).processMdRecord(xml);

@@ -820,7 +847,7 @@ class MappersTest {
 	}

 	@Test
-	void testEnermaps() throws IOException, DocumentException {
+	void testEnermaps() throws IOException {
 		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("enermaps.xml")));
 		final List<Oaf> list = new OdfToOafMapper(vocs, false, true).processMdRecord(xml);

@@ -845,7 +872,7 @@ class MappersTest {
 	}

 	@Test
-	void testClaimFromCrossref() throws IOException, DocumentException {
+	void testClaimFromCrossref() throws IOException {
 		final String xml = IOUtils
 			.toString(Objects.requireNonNull(getClass().getResourceAsStream("oaf_claim_crossref.xml")));
 		final List<Oaf> list = new OafToOafMapper(vocs, false, true).processMdRecord(xml);
@@ -862,7 +889,7 @@ class MappersTest {
 	}

 	@Test
-	void testODFRecord() throws IOException, DocumentException {
+	void testODFRecord() throws IOException {
 		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("odf_record.xml")));
 		final List<Oaf> list = new OdfToOafMapper(vocs, false, true).processMdRecord(xml);
 		System.out.println("***************");
@@ -882,7 +909,7 @@ class MappersTest {
 	}

 	@Test
-	void testTextGrid() throws IOException, DocumentException {
+	void testTextGrid() throws IOException {
 		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("textgrid.xml")));
 		final List<Oaf> list = new OdfToOafMapper(vocs, false, true).processMdRecord(xml);

@@ -916,7 +943,7 @@ class MappersTest {
 	}

 	@Test
-	void testBologna() throws IOException, DocumentException {
+	void testBologna() throws IOException {
 		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("oaf-bologna.xml")));
 		final List<Oaf> list = new OafToOafMapper(vocs, false, true).processMdRecord(xml);

@@ -933,7 +960,7 @@ class MappersTest {
 	}

 	@Test
-	void testJairo() throws IOException, DocumentException {
+	void testJairo() throws IOException {
 		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("oaf_jairo.xml")));
 		final List<Oaf> list = new OafToOafMapper(vocs, false, true).processMdRecord(xml);

@@ -971,7 +998,7 @@ class MappersTest {
 	}

 	@Test
-	void testZenodo() throws IOException, DocumentException {
+	void testZenodo() throws IOException {
 		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("odf_zenodo.xml")));
 		final List<Oaf> list = new OdfToOafMapper(vocs, false, true).processMdRecord(xml);

@@ -1016,7 +1043,7 @@ class MappersTest {
 	}

 	@Test
-	void testOdfFromHdfs() throws IOException, DocumentException {
+	void testOdfFromHdfs() throws IOException {
 		final String xml = IOUtils
 			.toString(Objects.requireNonNull(getClass().getResourceAsStream("odf_from_hdfs.xml")));

@@ -1065,7 +1092,7 @@ class MappersTest {
 	}

 	@Test
-	void testXMLEncodedURL() throws IOException, DocumentException {
+	void testXMLEncodedURL() throws IOException {
 		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("encoded-url.xml")));
 		final List<Oaf> list = new OafToOafMapper(vocs, false, true).processMdRecord(xml);

@@ -1081,7 +1108,7 @@ class MappersTest {
 	}

 	@Test
-	void testXMLEncodedURL_ODF() throws IOException, DocumentException {
+	void testXMLEncodedURL_ODF() throws IOException {
 		final String xml = IOUtils
 			.toString(Objects.requireNonNull(getClass().getResourceAsStream("encoded-url_odf.xml")));
 		final List<Oaf> list = new OdfToOafMapper(vocs, false, true).processMdRecord(xml);
@@ -1245,7 +1272,7 @@ class MappersTest {
 	}

 	@Test
-	void testRiunet() throws IOException, DocumentException {
+	void testRiunet() throws IOException {
 		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("riunet.xml")));
 		final List<Oaf> list = new OdfToOafMapper(vocs, false, true).processMdRecord(xml);
 		System.out.println("***************");
@@ -1291,7 +1318,7 @@ class MappersTest {
 	}

 	@Test
-	void testIRISPub() throws IOException, DocumentException {
+	void testIRISPub() throws IOException {
 		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("iris-odf.xml")));
 		final List<Oaf> list = new OdfToOafMapper(vocs, false, true).processMdRecord(xml);
 		System.out.println("***************");
File diff suppressed because one or more lines are too long
@@ -794,28 +794,6 @@
 			},
 			"value": "FOS: Computer and information sciences"
 		},
-		{
-			"dataInfo": {
-				"deletedbyinference": false,
-				"inferenceprovenance": "",
-				"inferred": false,
-				"invisible": false,
-				"provenanceaction": {
-					"classid": "sysimport:crosswalk:datasetarchive",
-					"classname": "sysimport:crosswalk:datasetarchive",
-					"schemeid": "dnet:provenanceActions",
-					"schemename": "dnet:provenanceActions"
-				},
-				"trust": "0.9"
-			},
-			"qualifier": {
-				"classid": "keyword",
-				"classname": "keyword",
-				"schemeid": "dnet:subject_classification_typologies",
-				"schemename": "dnet:subject_classification_typologies"
-			},
-			"value": "0101 mathematics"
-		},
 		{
 			"dataInfo": {
 				"deletedbyinference": false,
@@ -831,8 +809,8 @@
 				"trust": "0.9"
 			},
 			"qualifier": {
-				"classid": "keyword",
-				"classname": "keyword",
+				"classid": "FOS",
+				"classname": "Fields of Science and Technology classification",
 				"schemeid": "dnet:subject_classification_typologies",
 				"schemename": "dnet:subject_classification_typologies"
 			},
@@ -910,8 +888,8 @@
 				"inferred": false,
 				"invisible": false,
 				"provenanceaction": {
-					"classid": "sysimport:actionset",
-					"classname": "Harvested",
+					"classid": "subject:fos",
+					"classname": "subject:fos",
 					"schemeid": "dnet:provenanceActions",
 					"schemename": "dnet:provenanceActions"
 				},
@@ -923,7 +901,7 @@
 				"schemeid": "dnet:subject_classification_typologies",
 				"schemename": "dnet:subject_classification_typologies"
 			},
-			"value": "Avicennia"
+			"value": "0102 computer and information sciences"
 		},
 		{
 			"dataInfo": {
@@ -0,0 +1,129 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<record xmlns:oaire="http://namespace.openaire.eu/schema/oaire/"
+        xmlns:datacite="http://datacite.org/schema/kernel-4"
+        xmlns:dr="http://www.driver-repository.eu/namespace/dr"
+        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+        xmlns:oaf="http://namespace.openaire.eu/oaf"
+        xmlns:oai="http://www.openarchives.org/OAI/2.0/"
+        xmlns:dri="http://www.driver-repository.eu/namespace/dri"
+        xmlns:xs="http://www.w3.org/2001/XMLSchema"
+        xmlns:dc="http://purl.org/dc/elements/1.1/">
+    <header xmlns="http://www.openarchives.org/OAI/2.0/">
+        <dri:objIdentifier>base_oa_____::7ecf1ef502253efffe203ca9a22bb9f1</dri:objIdentifier>
+        <identifier>ftunivqespace:oai:espace.library.uq.edu.au:UQ:336902</identifier>
+        <datestamp>2020-12-22T10:30:27Z</datestamp>
+        <dr:dateOfTransformation>2024-09-10T17:21:36.972Z</dr:dateOfTransformation>
+    </header>
+    <metadata>
+        <datacite:resource>
+            <datacite:identifier identifierType="DOI">https://doi.org/10.1016/j.envint.2014.07.004</datacite:identifier>
+            <datacite:alternateIdentifiers>
+                <datacite:identifier alternateIdentifierType="url">https://espace.library.uq.edu.au/view/UQ:336902</datacite:identifier>
+                <datacite:identifier alternateIdentifierType="oai-original">ftunivqespace:oai:espace.library.uq.edu.au:UQ:336902</datacite:identifier>
+            </datacite:alternateIdentifiers>
+            <datacite:relatedIdentifiers/>
+            <datacite:resourceType>Article contribution</datacite:resourceType>
+            <datacite:titles>
+                <datacite:title>The role of environmental factors in the spatial distribution of Japanese encephalitis in mainland China</datacite:title>
+            </datacite:titles>
+            <datacite:creators>
+                <datacite:creator>
+                    <datacite:creatorName>Wang, Liya</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Hu, Wenbiao</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Soares Magalhaes, Ricardo J.</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Bi, Peng</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Ding, Fan</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Sun, Hailong</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Li, Shenlong</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Yin, Wenwu</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Wei, Lan</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Liu, Qiyong</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Haque, Ubydul</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Sun, Yansong</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Huang, Liuyu</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Tong, Shilu</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Clements, Archie C.A.</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Zhang, Wenyi</datacite:creatorName>
+                </datacite:creator>
+                <datacite:creator>
+                    <datacite:creatorName>Li, Chengyi</datacite:creatorName>
+                </datacite:creator>
+            </datacite:creators>
+            <datacite:contributors/>
+            <datacite:descriptions>
+                <datacite:description descriptionType="Abstract">Japanese encephalitis (JE) is the most common cause of viral encephalitis and an important public health concern in the Asia-Pacific region, particularly in China where 50% of global cases are notified. To explore the association between environmental factors and human JE cases and identify the high risk areas for JE transmission in China, we used annual notified data on JE cases at the center of administrative township and environmental variables with a pixel resolution of 1. km. ×. 1. km from 2005 to 2011 to construct models using ecological niche modeling (ENM) approaches based on maximum entropy. These models were then validated by overlaying reported human JE case localities from 2006 to 2012 onto each prediction map. ENMs had good discriminatory ability with the area under the curve (AUC) of the receiver operating curve (ROC) of 0.82-0.91, and low extrinsic omission rate of 5.44-7.42%. Resulting maps showed JE being presented extensively throughout southwestern and central China, with local spatial variations in probability influenced by minimum temperatures, human population density, mean temperatures, and elevation, with contribution of 17.94%-38.37%, 15.47%-21.82%, 3.86%-21.22%, and 12.05%-16.02%, respectively. Approximately 60% of JE cases occurred in predicted high risk areas, which covered less than 6% of areas in mainland China. Our findings will help inform optimal geographical allocation of the limited resources available for JE prevention and control in China, find hidden high-risk areas, and increase the effectiveness of public health interventions against JE transmission.</datacite:description>
+            </datacite:descriptions>
+            <datacite:subjects>
+                <datacite:subject>Japanese encephalitis</datacite:subject>
+                <datacite:subject>Ecological niche model</datacite:subject>
+                <datacite:subject>MaxEnt</datacite:subject>
+                <datacite:subject>China</datacite:subject>
+                <datacite:subject>2300 Environmental Science</datacite:subject>
+                <datacite:subject classificationCode="950" subjectScheme="ddc">950</datacite:subject>
+            </datacite:subjects>
+            <datacite:publisher>Pergamon Press</datacite:publisher>
+            <datacite:publicationYear>2014</datacite:publicationYear>
+            <datacite:formats/>
+            <datacite:language>eng</datacite:language>
+            <oaf:accessrights/>
+        </datacite:resource>
+        <dr:CobjCategory type="publication">0001</dr:CobjCategory>
+        <oaf:accessrights>UNKNOWN</oaf:accessrights>
+        <oaf:identifier identifierType="doi">10.1163/qwerty</oaf:identifier>
+        <oaf:identifier identifierType="doi">0.1163/18763308-90001038</oaf:identifier>
+        <oaf:identifier identifierType="doi">https://doi.org/10.1016/j.envint.2014.07.004</oaf:identifier>
+        <oaf:identifier identifierType="doi">https://doi.org/10.1080/09672567.2013.792375</oaf:identifier>
+        <oaf:identifier identifierType="doi">http://doi.org/10.1080/08673487.2012.812376</oaf:identifier>
+        <oaf:identifier identifierType="doi">http://dx.doi.org/10.1090/08673487.2012.812376</oaf:identifier>
+        <oaf:identifier identifierType="url">https://espace.library.uq.edu.au/view/UQ:336902</oaf:identifier>
+        <oaf:identifier identifierType="oai-original">ftunivqespace:oai:espace.library.uq.edu.au:UQ:336902</oaf:identifier>
+        <oaf:hostedBy name="The University of Queensland: UQ eSpace" id="opendoar____::575"/>
+        <oaf:collectedFrom name="Bielefeld Academic Search Engine (BASE)"
+                           id="openaire____::base_search"/>
+        <oaf:dateAccepted>2014-12-01</oaf:dateAccepted>
+        <oaf:relation relClass="hasAuthorInstitution"
+                      relType="resultOrganization"
+                      subRelType="affiliation"
+                      targetType="organization">ror_________::https://ror.org/00rqy9422</oaf:relation>
+        <oaf:datainfo>
+            <oaf:inferred>false</oaf:inferred>
+            <oaf:deletedbyinference>false</oaf:deletedbyinference>
+            <oaf:trust>0.89</oaf:trust>
+            <oaf:inferenceprovenance/>
+            <oaf:provenanceaction classid="sysimport:crosswalk:aggregator"
+                                  classname="sysimport:crosswalk:aggregator"
+                                  schemeid="dnet:provenanceActions"
+                                  schemename="dnet:provenanceActions"/>
+        </oaf:datainfo>
+    </metadata>
+</record>
@@ -130,5 +130,10 @@
 		"value": [
 			"Pippo", "Foo"
 		]
+	},
+	{
+		"field": "typology",
+		"type": "string",
+		"value": "Government"
 	}
 ]
Binary file not shown.
@@ -4,12 +4,13 @@ import eu.dnetlib.dhp.schema.sx.scholix.ScholixResource
 import eu.dnetlib.dhp.sx.graph.SparkCreateScholexplorerDump
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.{Encoder, Encoders, SparkSession}
-import org.junit.jupiter.api.Test
+import org.junit.jupiter.api.{Disabled, Test}
 import org.objenesis.strategy.StdInstantiatorStrategy

 class ScholixGenerationTest {

   @Test
+  @Disabled
   def generateScholix(): Unit = {

     val spark: SparkSession = SparkSession.builder().master("local[*]").getOrCreate()
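Note: annotating the test with JUnit 5's @Disabled keeps it compiling and discoverable while excluding it from automated runs, which fits a test that boots a local SparkSession and reads paths that only exist on a developer machine. The same pattern sketched in Java (the class and method names here are illustrative):

import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

class LocalSparkSmokeTest {

	@Test
	@Disabled("needs a local Spark session and real input paths") // skipped by the runner, still compiled
	void generateDump() {
		// build a SparkSession with master("local[*]") and exercise the dump job here
	}
}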
@@ -175,7 +175,8 @@ public class XmlSerializationUtils {
 				.append("<")
 				.append(name)
 				.append(" ")
-				.append(attr(measure.getId(), kv.getValue()))
+				.append(attr("id", measure.getId()))
+				.append(attr("score", kv.getValue()))
 				.append(attr("datasource", kv.getKey()))
 				.append(" />");
 		}
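Note: before this fix the measure id was used as the attribute name with the score as its value, so every measure produced a differently named attribute; the change emits id, score, and datasource as three fixed attributes. A self-contained sketch of the corrected output shape, where attr stands in for the class's own escaping helper:

public class MeasureXmlSketch {

	// Stand-in for XmlSerializationUtils.attr: name="escaped value" followed by a space.
	static String attr(final String name, final String value) {
		return name + "=\"" + value.replace("\"", "&quot;") + "\" ";
	}

	public static void main(String[] args) {
		final StringBuilder sb = new StringBuilder();
		sb
			.append("<measure ")
			.append(attr("id", "influence"))         // the measure identifier, as a fixed attribute
			.append(attr("score", "0.5"))            // the value, no longer abused as an attribute name
			.append(attr("datasource", "openaire")); // where the score comes from
		sb.append("/>");
		System.out.println(sb); // <measure id="influence" score="0.5" datasource="openaire" />
	}
}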
@@ -69,7 +69,7 @@
 		</configuration>
 	</global>

-	<start to="oaiphm_provision"/>
+	<start to="irish_oaiphm_provision"/>

 	<kill name="Kill">
 		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
Some files were not shown because too many files have changed in this diff