forked from D-Net/dnet-hadoop
completed download function in wf; added accumulators
This commit is contained in:
parent 53b22c1937
commit 8812ab65e1
@@ -4,12 +4,14 @@ package eu.dnetlib.doiboost.orcid;
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
 
 import java.io.IOException;
-import java.text.SimpleDateFormat;
+import java.time.LocalDate;
+import java.time.format.DateTimeFormatter;
 import java.util.*;
 
-import org.apache.commons.compress.utils.Lists;
 import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
 import org.apache.http.client.methods.CloseableHttpResponse;
 import org.apache.http.client.methods.HttpGet;
@@ -17,32 +19,31 @@ import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaPairRDD;
-import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
+import org.apache.spark.api.java.function.FlatMapFunction;
 import org.apache.spark.api.java.function.Function;
 import org.apache.spark.util.LongAccumulator;
-import org.mortbay.log.Log;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.gson.JsonElement;
 import com.google.gson.JsonParser;
-import com.ximpleware.NavException;
-import com.ximpleware.ParseException;
-import com.ximpleware.XPathEvalException;
-import com.ximpleware.XPathParseException;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.doiboost.orcid.model.DownloadedRecordData;
-import eu.dnetlib.doiboost.orcid.model.WorkData;
 import eu.dnetlib.doiboost.orcid.xml.XMLRecordParser;
 import scala.Tuple2;
 
 public class SparkDownloadOrcidWorks {
 
     static Logger logger = LoggerFactory.getLogger(SparkDownloadOrcidWorks.class);
-    static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
-    static final String lastUpdate = "2020-09-29 00:00:00";
+    public static final String LAMBDA_FILE_DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
+    public static final DateTimeFormatter LAMBDA_FILE_DATE_FORMATTER = DateTimeFormatter
+        .ofPattern(LAMBDA_FILE_DATE_FORMAT);
+    public static final String ORCID_XML_DATETIME_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
+    public static final DateTimeFormatter ORCID_XML_DATETIMEFORMATTER = DateTimeFormatter
+        .ofPattern(ORCID_XML_DATETIME_FORMAT);
+    public static final String lastUpdateValue = "2020-09-29 00:00:00";
 
     public static void main(String[] args) throws IOException, Exception {
 
@@ -60,12 +61,8 @@ public class SparkDownloadOrcidWorks {
         logger.info("isSparkSessionManaged: {}", isSparkSessionManaged);
         final String workingPath = parser.get("workingPath");
         logger.info("workingPath: ", workingPath);
-        // final String outputPath = parser.get("outputPath");
-        final String outputPath = "downloads/updated_works";
-        logger.info("outputPath: ", outputPath);
+        final String outputPath = parser.get("outputPath");
         final String token = parser.get("token");
-        // final String lambdaFileName = parser.get("lambdaFileName");
-        // logger.info("lambdaFileName: ", lambdaFileName);
 
         SparkConf conf = new SparkConf();
         runWithSparkSession(
@@ -73,9 +70,23 @@ public class SparkDownloadOrcidWorks {
             isSparkSessionManaged,
             spark -> {
                 JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
-                LongAccumulator parsedRecordsAcc = spark.sparkContext().longAccumulator("parsed_records");
-                LongAccumulator modifiedRecordsAcc = spark.sparkContext().longAccumulator("to_download_records");
+                LongAccumulator updatedAuthorsAcc = spark.sparkContext().longAccumulator("updated_authors");
+                LongAccumulator parsedAuthorsAcc = spark.sparkContext().longAccumulator("parsed_authors");
+                LongAccumulator parsedWorksAcc = spark.sparkContext().longAccumulator("parsed_works");
+                LongAccumulator modifiedWorksAcc = spark.sparkContext().longAccumulator("modified_works");
+                LongAccumulator maxModifiedWorksLimitAcc = spark
+                    .sparkContext()
+                    .longAccumulator("max_modified_works_limit");
+                LongAccumulator errorCodeFoundAcc = spark.sparkContext().longAccumulator("error_code_found");
+                LongAccumulator errorLoadingJsonFoundAcc = spark
+                    .sparkContext()
+                    .longAccumulator("error_loading_json_found");
+                LongAccumulator errorLoadingXMLFoundAcc = spark
+                    .sparkContext()
+                    .longAccumulator("error_loading_xml_found");
+                LongAccumulator errorParsingXMLFoundAcc = spark
+                    .sparkContext()
+                    .longAccumulator("error_parsing_xml_found");
                 LongAccumulator downloadedRecordsAcc = spark.sparkContext().longAccumulator("downloaded_records");
                 LongAccumulator errorHTTP403Acc = spark.sparkContext().longAccumulator("error_HTTP_403");
                 LongAccumulator errorHTTP409Acc = spark.sparkContext().longAccumulator("error_HTTP_409");
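A note on the accumulator block above: a Spark `LongAccumulator` is registered with the driver's `SparkContext`, incremented from tasks running on executors, and merged by Spark; reading `value()` is only meaningful on the driver after an action has completed. A minimal, self-contained sketch of the pattern (job and data hypothetical):

```java
import java.util.Arrays;

import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.util.LongAccumulator;

// Standalone sketch of the accumulator pattern used in SparkDownloadOrcidWorks.
public class AccumulatorSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().master("local[*]").appName("acc-demo").getOrCreate();
        JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

        // Registered on the driver, incremented inside tasks on the executors.
        LongAccumulator parsed = spark.sparkContext().longAccumulator("parsed_records");

        sc.parallelize(Arrays.asList("a", "b", "c"))
            .foreach(s -> parsed.add(1)); // runs distributed; Spark merges the per-task updates

        // Only reliable on the driver, once the action has finished.
        System.out.println(parsed.value()); // 3
        spark.stop();
    }
}
```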
@@ -83,38 +94,60 @@ public class SparkDownloadOrcidWorks {
                 LongAccumulator errorHTTP525Acc = spark.sparkContext().longAccumulator("error_HTTP_525");
                 LongAccumulator errorHTTPGenericAcc = spark.sparkContext().longAccumulator("error_HTTP_Generic");
 
-                logger.info("Retrieving updated authors");
                 JavaPairRDD<Text, Text> updatedAuthorsRDD = sc
                     .sequenceFile(workingPath + "downloads/updated_authors/*", Text.class, Text.class);
-                logger.info("Updated authors retrieved: " + updatedAuthorsRDD.count());
+                updatedAuthorsAcc.setValue(updatedAuthorsRDD.count());
 
-                Function<Tuple2<Text, Text>, Iterator<String>> retrieveWorkUrlFunction = data -> {
+                FlatMapFunction<Tuple2<Text, Text>, String> retrieveWorkUrlFunction = data -> {
                     String orcidId = data._1().toString();
                     String jsonData = data._2().toString();
-                    List<String> orcidIdWorkId = Lists.newArrayList();
-                    Map<String, String> workIdLastModifiedDate = retrieveWorkIdLastModifiedDate(jsonData);
+                    List<String> workIds = new ArrayList<>();
+                    Map<String, String> workIdLastModifiedDate = new HashMap<>();
+                    JsonElement jElement = new JsonParser().parse(jsonData);
+                    String statusCode = getJsonValue(jElement, "statusCode");
+                    if (statusCode.equals("200")) {
+                        String compressedData = getJsonValue(jElement, "compressedData");
+                        if (StringUtils.isEmpty(compressedData)) {
+                            errorLoadingJsonFoundAcc.add(1);
+                        } else {
+                            String authorSummary = ArgumentApplicationParser.decompressValue(compressedData);
+                            if (StringUtils.isEmpty(authorSummary)) {
+                                errorLoadingXMLFoundAcc.add(1);
+                            } else {
+                                try {
+                                    workIdLastModifiedDate = XMLRecordParser
+                                        .retrieveWorkIdLastModifiedDate(authorSummary.getBytes());
+                                } catch (Exception e) {
+                                    logger.error("parsing " + orcidId + " [" + jsonData + "]", e);
+                                    errorParsingXMLFoundAcc.add(1);
+                                }
+                            }
+                        }
+                    } else {
+                        errorCodeFoundAcc.add(1);
+                    }
+                    parsedAuthorsAcc.add(1);
                     workIdLastModifiedDate.forEach((k, v) -> {
+                        parsedWorksAcc.add(1);
                         if (isModified(orcidId, v)) {
-                            orcidIdWorkId.add(orcidId.concat("/work/").concat(k));
+                            modifiedWorksAcc.add(1);
+                            workIds.add(orcidId.concat("/work/").concat(k));
                         }
                     });
-                    Iterator<String> iterator = orcidIdWorkId.iterator();
-                    return iterator;
+                    if (workIdLastModifiedDate.size() > 50) {
+                        maxModifiedWorksLimitAcc.add(1);
+                    }
+                    return workIds.iterator();
                 };
 
-                List<Iterator<String>> toDownloadWorksRDD = updatedAuthorsRDD
-                    .map(retrieveWorkUrlFunction)
-                    .take(1000);
-                sc.parallelize(toDownloadWorksRDD).saveAsTextFile(workingPath.concat("downloads/updated_works_test/"));
-
-                Function<Tuple2<Text, Text>, Tuple2<String, String>> downloadRecordFunction = data -> {
-                    String orcidId = data._1().toString();
-                    String lastModifiedDate = data._2().toString();
+                Function<String, Tuple2<String, String>> downloadWorkFunction = data -> {
+                    String relativeWorkUrl = data;
+                    String orcidId = relativeWorkUrl.split("/")[0];
                     final DownloadedRecordData downloaded = new DownloadedRecordData();
                     downloaded.setOrcidId(orcidId);
-                    downloaded.setLastModifiedDate(lastModifiedDate);
+                    downloaded.setLastModifiedDate(lastUpdateValue);
                     try (CloseableHttpClient client = HttpClients.createDefault()) {
-                        HttpGet httpGet = new HttpGet("https://api.orcid.org/v3.0/" + orcidId + "/work");
+                        HttpGet httpGet = new HttpGet("https://api.orcid.org/v3.0/" + relativeWorkUrl);
                         httpGet.addHeader("Accept", "application/vnd.orcid+xml");
                         httpGet.addHeader("Authorization", String.format("Bearer %s", token));
                         long startReq = System.currentTimeMillis();
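The switch from `Function<Tuple2<Text, Text>, Iterator<String>>` to `FlatMapFunction<Tuple2<Text, Text>, String>` in the hunk above matters because each author expands to zero or more modified work IDs, and `flatMap` flattens those per-author iterators into a single `JavaRDD<String>` of relative work URLs that the download step can consume. A toy version of that fan-out (data hypothetical):

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

// Toy illustration: one author row fans out to 0..n relative work URLs.
public class FlatMapSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().master("local[*]").appName("flatmap-demo").getOrCreate();
        JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

        List<Tuple2<String, String>> authors = Arrays.asList(
            new Tuple2<>("0000-0001-0000-0001", "11,22"), // two modified works
            new Tuple2<>("0000-0001-0000-0002", ""));     // none

        FlatMapFunction<Tuple2<String, String>, String> toWorkUrls = author -> {
            List<String> urls = new ArrayList<>();
            for (String workId : author._2().split(",")) {
                if (!workId.isEmpty()) {
                    urls.add(author._1() + "/work/" + workId);
                }
            }
            return urls.iterator(); // flatMap concatenates the per-author iterators
        };

        JavaRDD<String> workUrls = sc.parallelize(authors).flatMap(toWorkUrls);
        workUrls.collect().forEach(System.out::println); // two URLs total
        spark.stop();
    }
}
```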
@@ -153,62 +186,55 @@ public class SparkDownloadOrcidWorks {
                                 .compressArgument(IOUtils.toString(response.getEntity().getContent())));
                     } catch (Throwable e) {
                         logger.info("Downloading " + orcidId, e.getMessage());
+                        if (downloaded.getStatusCode() == 503) {
+                            throw new RuntimeException("Orcid request rate limit reached (HTTP 503)");
+                        }
                         downloaded.setErrorMessage(e.getMessage());
                         return downloaded.toTuple2();
                     }
                     return downloaded.toTuple2();
                 };
 
                 // sc.hadoopConfiguration().set("mapreduce.output.fileoutputformat.compress", "true");
 
-                // logger.info("Start downloading ...");
-                // updatedAuthorsRDD
-                // .map(downloadRecordFunction)
-                // .mapToPair(t -> new Tuple2(new Text(t._1()), new Text(t._2())))
-                // .saveAsNewAPIHadoopFile(
-                // workingPath.concat(outputPath),
-                // Text.class,
-                // Text.class,
-                // SequenceFileOutputFormat.class,
-                // sc.hadoopConfiguration());
-                // logger.info("parsedRecordsAcc: " + parsedRecordsAcc.value().toString());
-                // logger.info("modifiedRecordsAcc: " + modifiedRecordsAcc.value().toString());
-                // logger.info("downloadedRecordsAcc: " + downloadedRecordsAcc.value().toString());
-                // logger.info("errorHTTP403Acc: " + errorHTTP403Acc.value().toString());
-                // logger.info("errorHTTP409Acc: " + errorHTTP409Acc.value().toString());
-                // logger.info("errorHTTP503Acc: " + errorHTTP503Acc.value().toString());
-                // logger.info("errorHTTP525Acc: " + errorHTTP525Acc.value().toString());
-                // logger.info("errorHTTPGenericAcc: " + errorHTTPGenericAcc.value().toString());
+                updatedAuthorsRDD
+                    .flatMap(retrieveWorkUrlFunction)
+                    .repartition(100)
+                    .map(downloadWorkFunction)
+                    .mapToPair(t -> new Tuple2(new Text(t._1()), new Text(t._2())))
+                    .saveAsTextFile(workingPath.concat(outputPath), GzipCodec.class);
+                // .saveAsNewAPIHadoopFile(
+                // workingPath.concat(outputPath),
+                // Text.class,
+                // Text.class,
+                // SequenceFileOutputFormat.class,
+                // sc.hadoopConfiguration());
+                logger.info("updatedAuthorsAcc: " + updatedAuthorsAcc.value().toString());
+                logger.info("parsedAuthorsAcc: " + parsedAuthorsAcc.value().toString());
+                logger.info("parsedWorksAcc: " + parsedWorksAcc.value().toString());
+                logger.info("modifiedWorksAcc: " + modifiedWorksAcc.value().toString());
+                logger.info("maxModifiedWorksLimitAcc: " + maxModifiedWorksLimitAcc.value().toString());
+                logger.info("errorCodeFoundAcc: " + errorCodeFoundAcc.value().toString());
+                logger.info("errorLoadingJsonFoundAcc: " + errorLoadingJsonFoundAcc.value().toString());
+                logger.info("errorLoadingXMLFoundAcc: " + errorLoadingXMLFoundAcc.value().toString());
+                logger.info("errorParsingXMLFoundAcc: " + errorParsingXMLFoundAcc.value().toString());
+                logger.info("downloadedRecordsAcc: " + downloadedRecordsAcc.value().toString());
+                logger.info("errorHTTP403Acc: " + errorHTTP403Acc.value().toString());
+                logger.info("errorHTTP409Acc: " + errorHTTP409Acc.value().toString());
+                logger.info("errorHTTP503Acc: " + errorHTTP503Acc.value().toString());
+                logger.info("errorHTTP525Acc: " + errorHTTP525Acc.value().toString());
+                logger.info("errorHTTPGenericAcc: " + errorHTTPGenericAcc.value().toString());
             });
 
     }
 
-    private static boolean isModified(String orcidId, String modifiedDate) {
-        Date modifiedDateDt = null;
-        Date lastUpdateDt = null;
-        try {
-            if (modifiedDate.length() != 19) {
-                modifiedDate = modifiedDate.substring(0, 19);
-            }
-            modifiedDateDt = new SimpleDateFormat(DATE_FORMAT).parse(modifiedDate);
-            lastUpdateDt = new SimpleDateFormat(DATE_FORMAT).parse(lastUpdate);
-        } catch (Exception e) {
-            logger.info("[" + orcidId + "] Parsing date: ", e.getMessage());
-            return true;
-        }
-        return modifiedDateDt.after(lastUpdateDt);
-    }
-
-    private static Map<String, String> retrieveWorkIdLastModifiedDate(String json)
-        throws XPathEvalException, NavException, XPathParseException, ParseException {
-        JsonElement jElement = new JsonParser().parse(json);
-        String statusCode = getJsonValue(jElement, "statusCode");
-        if (statusCode.equals("200")) {
-            String compressedData = getJsonValue(jElement, "compressedData");
-            String authorSummary = ArgumentApplicationParser.decompressValue(compressedData);
-            return XMLRecordParser.retrieveWorkIdLastModifiedDate(authorSummary.getBytes());
-        }
-        return new HashMap<>();
+    public static boolean isModified(String orcidId, String modifiedDateValue) {
+        LocalDate modifiedDate = null;
+        LocalDate lastUpdate = null;
+        modifiedDate = LocalDate.parse(modifiedDateValue, SparkDownloadOrcidWorks.ORCID_XML_DATETIMEFORMATTER);
+        lastUpdate = LocalDate
+            .parse(SparkDownloadOrcidWorks.lastUpdateValue, SparkDownloadOrcidWorks.LAMBDA_FILE_DATE_FORMATTER);
+        return modifiedDate.isAfter(lastUpdate);
     }
 
     private static String getJsonValue(JsonElement jElement, String property) {
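The rewritten `isModified` parses the ORCID XML timestamp (`yyyy-MM-dd'T'HH:mm:ss.SSS'Z'`) and the fixed `lastUpdateValue` with `LocalDate.parse`, which keeps only the date fields of whatever the formatter matches, so the comparison is at day granularity rather than per second. A standalone check of that behavior (timestamp values made up):

```java
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

// Verifies the day-granularity comparison used by SparkDownloadOrcidWorks.isModified.
public class DateCheck {
    public static void main(String[] args) {
        DateTimeFormatter xmlFmt = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
        DateTimeFormatter lambdaFmt = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

        // LocalDate.parse discards the time-of-day fields after parsing, so a work
        // modified later on the same day as lastUpdate would not count as newer.
        LocalDate modified = LocalDate.parse("2020-10-02T11:00:31.000Z", xmlFmt);
        LocalDate lastUpdate = LocalDate.parse("2020-09-29 00:00:00", lambdaFmt);

        System.out.println(modified.isAfter(lastUpdate)); // true
    }
}
```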
@@ -1,6 +1,7 @@
 
 package eu.dnetlib.doiboost.orcid.xml;
 
+import java.io.IOException;
 import java.util.*;
 
 import org.mortbay.log.Log;
@@ -161,30 +162,22 @@ public class XMLRecordParser {
     }
 
     public static Map<String, String> retrieveWorkIdLastModifiedDate(byte[] bytes)
-        throws ParseException, XPathParseException, NavException, XPathEvalException {
+        throws ParseException, XPathParseException, NavException, XPathEvalException, IOException {
         final VTDGen vg = new VTDGen();
         vg.setDoc(bytes);
         vg.parse(true);
         final VTDNav vn = vg.getNav();
         final AutoPilot ap = new AutoPilot(vn);
-        ap.declareXPathNameSpace(NS_COMMON, NS_COMMON_URL);
-        ap.declareXPathNameSpace(NS_PERSON, NS_PERSON_URL);
-        ap.declareXPathNameSpace(NS_DETAILS, NS_DETAILS_URL);
-        ap.declareXPathNameSpace(NS_OTHER, NS_OTHER_URL);
-        ap.declareXPathNameSpace(NS_RECORD, NS_RECORD_URL);
-        ap.declareXPathNameSpace(NS_ERROR, NS_ERROR_URL);
         ap.declareXPathNameSpace(NS_WORK, NS_WORK_URL);
-        ap.declareXPathNameSpace(NS_ACTIVITIES, NS_ACTIVITIES_URL);
+        ap.declareXPathNameSpace(NS_COMMON, NS_COMMON_URL);
         Map<String, String> workIdLastModifiedDate = new HashMap<>();
         ap.selectXPath("//work:work-summary");
+        String workId = "";
         while (ap.evalXPath() != -1) {
-            String workId = "";
             String lastModifiedDate = "";
             int attr = vn.getAttrVal("put-code");
             if (attr > -1) {
                 workId = vn.toNormalizedString(attr);
-                workIdLastModifiedDate.put(workId, "");
             }
             if (vn.toElement(VTDNav.FIRST_CHILD, "common:last-modified-date")) {
                 int val = vn.getText();
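For context, `retrieveWorkIdLastModifiedDate` follows the usual VTD-XML pattern: a namespace-aware parse, an `AutoPilot` XPath cursor over `//work:work-summary`, and attribute reads at each match. A minimal sketch of that traversal against a shortened, made-up document:

```java
import java.util.HashMap;
import java.util.Map;

import com.ximpleware.AutoPilot;
import com.ximpleware.VTDGen;
import com.ximpleware.VTDNav;

// Minimal VTD-XML traversal sketch; the real parser binds the full set of
// ORCID work/common namespaces and also reads last-modified-date children.
public class VtdSketch {
    public static void main(String[] args) throws Exception {
        byte[] xml = ("<works xmlns:work=\"http://www.orcid.org/ns/work\">"
            + "<work:work-summary put-code=\"42\"/></works>").getBytes();

        VTDGen vg = new VTDGen();
        vg.setDoc(xml);
        vg.parse(true); // namespace-aware parse
        VTDNav vn = vg.getNav();
        AutoPilot ap = new AutoPilot(vn);
        ap.declareXPathNameSpace("work", "http://www.orcid.org/ns/work");
        ap.selectXPath("//work:work-summary");

        Map<String, String> ids = new HashMap<>();
        while (ap.evalXPath() != -1) { // cursor advances to each matching element
            int attr = vn.getAttrVal("put-code");
            if (attr > -1) {
                ids.put(vn.toNormalizedString(attr), "");
            }
        }
        System.out.println(ids); // {42=}
    }
}
```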
@@ -66,7 +66,7 @@
         </configuration>
     </global>
 
-    <start to="DownloadOrcidAuthors"/>
+    <start to="DownloadOrcidWorks"/>
 
 
     <kill name="Kill">
@@ -163,6 +163,33 @@
         <ok to="End"/>
         <error to="Kill"/>
     </action>
 
+    <action name="DownloadOrcidWorks">
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <master>yarn-cluster</master>
+            <mode>cluster</mode>
+            <name>DownloadOrcidWorks</name>
+            <class>eu.dnetlib.doiboost.orcid.SparkDownloadOrcidWorks</class>
+            <jar>dhp-doiboost-${projectVersion}.jar</jar>
+            <spark-opts>
+                --conf spark.dynamicAllocation.enabled=true
+                --conf spark.dynamicAllocation.maxExecutors=${spark2MaxExecutors}
+                --executor-memory=${sparkExecutorMemory}
+                --driver-memory=${sparkDriverMemory}
+                --conf spark.extraListeners=${spark2ExtraListeners}
+                --conf spark.yarn.historyServer.address=${spark2YarnHistoryServerAddress}
+                --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
+                --conf spark.sql.queryExecutionListeners=${spark2SqlQueryExecutionListeners}
+            </spark-opts>
+            <arg>-w</arg><arg>${workingPath}/</arg>
+            <arg>-n</arg><arg>${nameNode}</arg>
+            <arg>-f</arg><arg>-</arg>
+            <arg>-o</arg><arg>downloads/updated_works</arg>
+            <arg>-t</arg><arg>${token}</arg>
+        </spark>
+        <ok to="End"/>
+        <error to="Kill"/>
+    </action>
+
     <end name="End"/>
 </workflow-app>
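The `<arg>` pairs in the new `DownloadOrcidWorks` action arrive in `main()` as ordinary argv flags, which `ArgumentApplicationParser` resolves to the named parameters read earlier (`workingPath`, `outputPath`, `token`); in dnet-hadoop the real parser is driven by a JSON parameter-definition resource. A toy stand-in for that flag-to-name mapping (all values hypothetical):

```java
import java.util.HashMap;
import java.util.Map;

// Toy stand-in for ArgumentApplicationParser, only to show how the oozie
// <arg> pairs above reach the job as named parameters.
public class ArgsSketch {
    public static void main(String[] args) {
        String[] argv = {
            "-w", "/data/orcid_activities/",  // ${workingPath}/ (path hypothetical)
            "-n", "hdfs://nameservice1",      // ${nameNode} (value hypothetical)
            "-o", "downloads/updated_works",  // output dir under the working path
            "-t", "xxxx-access-token"         // ${token} placeholder
        };
        Map<String, String> parsed = new HashMap<>();
        for (int i = 0; i + 1 < argv.length; i += 2) {
            parsed.put(argv[i].substring(1), argv[i + 1]); // "-w" -> "w"
        }
        System.out.println(parsed.get("w")); // workingPath
        System.out.println(parsed.get("o")); // outputPath
    }
}
```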
@@ -344,7 +344,7 @@ public class OrcidClientTest {
     }
 
     @Test
-    public void cleanAuthorListTest() throws Exception {
+    private void cleanAuthorListTest() throws Exception {
         AuthorData a1 = new AuthorData();
         a1.setOid("1");
         a1.setName("n1");
@@ -370,4 +370,14 @@ public class OrcidClientTest {
         list.removeIf(a -> !namesAlreadySeen.add(a.getOid()));
         assertTrue(list.size() == 2);
     }
+
+    @Test
+    public void testReadDownloadedWork() throws Exception {
+        final String base64CompressedRecord = IOUtils
+            .toString(getClass().getResourceAsStream("0000-0002-6664-7451_work.compressed.base64"));
+        final String work = ArgumentApplicationParser.decompressValue(base64CompressedRecord);
+        logToFile("\n\ndownloaded \n\n" + work);
+        // final String downloadedRecord = testDownloadRecord("0000-0003-3028-6161", REQUEST_TYPE_RECORD);
+        // assertTrue(recordFromSeqFile.equals(downloadedRecord));
+    }
 }
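`testReadDownloadedWork` reads the new `0000-0002-6664-7451_work.compressed.base64` resource and inflates it with `ArgumentApplicationParser.decompressValue`; the `H4sI...` prefix of the blob below is the gzip magic number in base64, so the storage scheme is evidently gzip wrapped in base64. A round-trip sketch of that scheme using only the JDK (assumed equivalent to the project's helper, not the helper itself):

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

// Round trip of the base64-over-gzip record format read by testReadDownloadedWork.
public class CompressRoundTrip {
    public static void main(String[] args) throws Exception {
        String xml = "<work:work-summary put-code=\"1\"/>";

        // compress: gzip, then base64 (what the downloader stores)
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (GZIPOutputStream gzip = new GZIPOutputStream(bos)) {
            gzip.write(xml.getBytes(StandardCharsets.UTF_8));
        }
        String base64Compressed = Base64.getEncoder().encodeToString(bos.toByteArray());

        // decompress: base64, then gunzip (what decompressValue presumably does)
        byte[] raw = Base64.getDecoder().decode(base64Compressed);
        try (GZIPInputStream gunzip = new GZIPInputStream(new ByteArrayInputStream(raw))) {
            String restored = new String(gunzip.readAllBytes(), StandardCharsets.UTF_8);
            System.out.println(restored.equals(xml)); // true
        }
    }
}
```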
@@ -4,17 +4,27 @@ package eu.dnetlib.doiboost.orcid.xml;
 import static org.junit.jupiter.api.Assertions.assertNotNull;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
+import java.io.IOException;
+import java.time.LocalDate;
+import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.commons.io.IOUtils;
 import org.junit.jupiter.api.Test;
 
+import com.ximpleware.*;
+
 import eu.dnetlib.dhp.schema.orcid.AuthorData;
 import eu.dnetlib.doiboost.orcid.OrcidClientTest;
+import eu.dnetlib.doiboost.orcid.SparkDownloadOrcidWorks;
 import eu.dnetlib.doiboost.orcid.model.WorkData;
 import eu.dnetlib.doiboost.orcidnodoi.json.JsonWriter;
 
 public class XMLRecordParserTest {
+
+    private static final String NS_WORK = "work";
+    private static final String NS_WORK_URL = "http://www.orcid.org/ns/work";
+    private static final String NS_COMMON_URL = "http://www.orcid.org/ns/common";
+    private static final String NS_COMMON = "common";
 
     @Test
     private void testOrcidAuthorDataXMLParser() throws Exception {
@@ -67,9 +77,6 @@ public class XMLRecordParserTest {
         String xml = IOUtils
             .toString(
                 this.getClass().getResourceAsStream("summary_0000-0001-5109-1000_othername.xml"));
 
-        XMLRecordParser p = new XMLRecordParser();
-
         AuthorData authorData = XMLRecordParser.VTDParseAuthorData(xml.getBytes());
         assertNotNull(authorData);
         assertNotNull(authorData.getOtherNames());
@@ -80,14 +87,18 @@ public class XMLRecordParserTest {
 
     @Test
     public void testWorkIdLastModifiedDateXMLParser() throws Exception {
 
         String xml = IOUtils
             .toString(
-                this.getClass().getResourceAsStream("record_8888-8888-8888-8880.xml"));
-        // Map<String, String> workIdLastModifiedDate = XMLRecordParser.retrieveWorkIdLastModifiedDate(xml.getBytes());
-        // String LastModifiedDate = workIdLastModifiedDate.get(0);
-        // OrcidClientTest.logToFile(LastModifiedDate + " -- " + workIdLastModifiedDate.get(LastModifiedDate));
-        String result = XMLRecordParser.retrieveWorkIdFromSummary(xml.getBytes(), "empty");
-        OrcidClientTest.logToFile(result);
+                this.getClass().getResourceAsStream("record_0000-0001-5004-5918.xml"));
+        Map<String, String> workIdLastModifiedDate = XMLRecordParser.retrieveWorkIdLastModifiedDate(xml.getBytes());
+        workIdLastModifiedDate.forEach((k, v) -> {
+            try {
+                OrcidClientTest
+                    .logToFile(
+                        k + " " + v + " isModified after " + SparkDownloadOrcidWorks.lastUpdateValue + ": "
+                            + SparkDownloadOrcidWorks.isModified("0000-0001-5004-5918", v));
+            } catch (IOException e) {
+            }
+        });
     }
 }
@@ -0,0 +1 @@
H4sIAAAAAAAAAO1c63LbNhb+n6fA6EebTE2JulpyYnXVpE2a1Jus7V5mO/0BkZCImCJVgLSidjqzf/cJ9oH2TfZJ9jsASVESLWdsddNulJlcDJxzcO4XEJMnn7+bhexaKC3j6LTWrLs1JiIv9mU0Pa19e/mV068xnfDI52EcidPaUuja58MHTxaxujqhP9g8TRwgYK/Xb/Z7TbdZY3OeBKe1hotfDn63nF6v13GOO91mg3AaK8hrqeVYhjJZntbm6TiUXo2BpUifyCgRKuLhaS1IkvlJo7FYLOqx8qSPP6eNSDdyiBxD+KnHEyPITSgFSI7jS53IyNuNVQIq8MRcCZAS/g60AibHipNAKCfiM3Ez1gomx5qJ2RgWCuT8ZqwVTKENpWK1QxO0ncN68Wy2SwF2P4eGULHaIbfdz6HnYCuGlRxfJFyG+ma8TcicwpVYLnYemAEUks+AvUNy2i5g31kfcqQvokROpNils23gnM4kjWzM3ISbARRaUWIiFEJN7FLICijH476vhN6BkwGsouhawgGdeazlbiffhMwpUMDejEW7OWSAMInV8mbgDGBlp3kYL2dQ5S5j5TA51s8pD6H62yJ9DSzH1UJdS29H8GUA6757m8cWtkGGgA7lLpOuYFbRpAVXHgV9qna47TrcikP8rMS1FItdbBZAOd44DXdYlXY3+QMBHadql/a2QGvDBwy/ntj8ceIpQdnQ8fHnsOW2UByaTtu9bLVOOv2TJqpPx/37k0YV9BqdkOvEmaFIIQLL1Jqu02pdus0T1z1xe/VOu7+iVoGzRtMybNe21x0vlPBBBP4KogyVKjkkrWioZaUSi9QYvXnjdH948bfLL1vtN98evx5dXA4KvgizkiTV0OFOVANRiRvEOhkWfBQIZnklYeNWETeUQEVp+ApZ7FPNnsZhKKaCfRNHfhxt0jKQDypOyRZN+5DIJKzQuF2+iD3JQ/aF4jJiX6W2+mLhjCepMkHNsPFXsRjHKmJfRxMeJZp9L5OAoVsx/4jThHH2FZ/JcMle2NzD4gkbpYnUM3YxF16i0hl7JjWqh1AFqyXGnjQ2WbW8v4U0VAnsxsvR2Qi8JKYhiuciytDWoUroOohVgjqnPSXnJMzwkzB5PP9kmjz+ejbHHkfSP2HfBzxhUkNShD1lZxYrxr2fU6nwb8gfiVSh97oWYTynJAkFeTCISeCa6dSDNjTjVmCdC+xnArOHo4tnj+iAKCZVTeQ7OiJNoAdxxMbQn4x0IrhPMJxdp2EkFLf9GktiLBU0odcEtkr0ERO0CONB69paEVGHVJyGlPfq7GtbPZdwJIZmh41lHMZTpOqQzYQX8AjM4jhtkEnoBVl1/XAljBI0C+P4ighBTOQeHAmtIPELWkApQ3cZkihiEithTzMeBXl0wOcgPl4SXBLxZOP8yEcoGxTxDolemjpMcobI4DjRcIVtLTLJ62wUyRmo6CT1ISn0P50KnQAIZtSp9gRsvdJehfFyy+B4JTVILAIRsamIRCK9nCWBSq3iKEMB3JVmE8sqeCnZn4foV6gZp7bFsK6XkRcAN051poisIBm9kawkqdUF/Sv2rRskKN0sgEojsKugTnAl3iGyIuuHQTrj5I0I0QQmJmduGG8u3Pr1+K2go+DVlzEZF00KSUfdrmU0slENLiercJ+twp3Yt+5kOfek8lKo3fjmhrPAl23YB6Wwv3hmQ8akjEomnwktp9ERuxAJGv7pkUklb7iC8uWcEswJMo1VhhdTCBtTG+rtXiF+xkJkebFZqJKdoxUKukOhFrAoJJ5aa1MRjSgPMDjV1Ph4wi4SdhnEM1jiRaznkuwEmWwSPmJfRtMQ5x6xVBt45gtfmgkkO6lQXk5SLxHfMxg0WZBNX6aRYK32EWu5za4Vf5ROU/hw06z160hza1IiaShNqWyqhADPIScj203S+MPzzx4ZOmRoG4V5JIfC5BBKTiSvDSIDu6bJSgU+PHcesQUo4khPpSY3ZjFgbVJnFyVfp1CD7GVnt3pQYmpCJZTRFUiAn8zHch9kC07Gns05Um6Vz5wRmdc2Z1ruzwTXKax3ws4z6vhhjr8pFxkut84gQbQIESG5Bxetv82zZjbWAXZnGI4cjthYaqlzzbKQ0shmhBfiEkVwKbgXZBIbsVINelQfQNSwbLJb7JVYswUlEiXF8YwEtuCJMSUn2slZqrPnKk7nJudnw8sR0UgUOgZyOaMA8Q7ehfYBLj2WKgmKn7THI+t4U0Pm3/8yO2bW54YlkDP6yvNPlVHOhUa1gQUuoZuJJF7R8qFciYR4AZummE5Ys8/OPwN12z48bLYRf6F4DIX4EhntR8WjqfjJVAjkW41SR25UZrXTqg/a7MeOW3ddp9Op93s/gT9xpa3b0wHOfQ/ouuzH9qDeGtAB3X5+QDkYg9hqBdIEqNeUx8z4EyUmaqaUZo2TbNWBzQqgAJwYhqgAKLiClrDZjD1M/vOPf57id6ve6T9mb7Kf0LVbUUMxAR4Kl7B9CKVNsFagteuD3jpandIpJlZTr45sijCeycsC3OgJuV8T1zzK2NViSpXRNCQmMCami0lDXubEbVcI4ME9AZeIEvNWGzn1E1Yi4ZZJgJ45ahuyVe83NyA3VFyGPT6uoloJ2u2ugVptrrz56DZ7+4JGLMoBMRX19oBSTadrnevTbZc8onpNGNXkstNklFOFZUqub84w6RmzQdZcVIXu0zjywlTbBgZGOUdavLbt8EWl1+q8GfSZj2kKGWa9aVilMkRClsxMQTTtOvLVJdVzW8gncWoSKrXdRatguxvoM+DXtqzeUvOMB290JFshuDvPkuT+Uq9LYlx/JYG6obrMVQzXNR2APdWx3X5WdWAQRLMhWtJ/NrFsDyalqcVDv7Fa2153kuVcDMdynIh3Gb31rZvwrnmYiuFfTKMVil87/nG33ez1B72+3/EHYtxqdwb+2D9u9pu+N3aPQMeMVIbWKat9gGGxRkzwMaIDnmiYOAxuh8Htzz64/fGmtMNIdhjJdo5kh/nrQ89fh2HrMGwdhq0//rB1mKz+h5OVnQ9S1EqVDSkv0Vsm7KnkSqF6c8PIS8ooaFzZ60/PoGgvQCuccJC2BuIhYhIjx0wie19blGd8gj6XfUGdQyjM0jeph940Zk8NN7HzHHnOt1ujCBxES/ZGIcLMypczMPwiBffWCy4SIaOFQGf168sYrERYfxXyVP+WcUhrnL1C6uQ6o0Bl/41QympztBRoydlLfk3lDAvfhdwHz4qDeIwKFIiM93MevYUORldxKK64sudTqQ7Yd9JLYpUdqcU8YC/4WzKekVl4aKLYWarmwTLTwrUEJ/6CK99ydYlaeCXZCIIG0qw8p3YCzdOZNwqpbTMmWULDLJ8b0T4NzOoM9THIVvlc0ZIfS1YANt1603Wbjbcc/mrdmz7z1YlAvdnv9Q0V8DhNKW0SCjV+6BjMxnUcpjORH2qWsk+DmWtsfj80IFLraMVq97jjtPtu12zl7YiirREsSrkbjY9vhrFRFiH08oGgo5QeB2WEOlj6bXM6twN4+Yvn+qyffbClGT7/ppk
N6/kH0mK8L75fm9dclvzqc3sZgkwxJA0WH17NyhacMc7Q7RRgdmELzufLodstoOjH9U/Q1Szl6KXXPXqbeGm3+pt7CcBedmSfwkk9WCuY2IK7lZo1Tn4p4tCtiEPXIg7dizjli5HKQ0q23XVKRKkrlL9Qy438oaV5l4N6JGp3P3tF9HYGbLZHug3kfIhmfFJJcQ1q+y1DpZnubsP5bA+Wa7uDbrPZ6/xe1tlJ/89uAbEHA7Qc3aq7Tr/r9jrtVrvd7f5epnjPk/7sRkFtvLdRbi2pv5eN7nbwhzdZ1Y5eL2GpCotnaFdeOEdrVcffde7V06uGuZ4OGyJqlAqhbtjm1TGXL86qa3ZWHbKDjaxjd7IJw6HW20GX5WT3QQ537H2Qk90HOfHEsffXTn7X7OS3pA/fp6A8qgfJLCw9lAvXvkXQjYYcpziqXK0396qNVQJwzDO5dbB1ldqXfWsP+/KH7U3neNBpOt1W2y3xKW+mZp7s7cKueNPXeD+mM9ExrMnEvr/bHDjO4uiXOH+aVgasolM6jCf2n0JXCLYFrdDbD+3gkx+1ubsh33sduA32wazecvpuu+30Bt0dzzhvtHoV9l6tftNIeTD8/Q3fG7htRO3gLuFehb1Pw2/eFhzsfV97t52WOzh2BseDH+5g7yrsfdp7/SLoI7T2lsDV92AHzYjh2jXgQSFiWLoF/QjVsfe62G73eo47aLfuVBe3sffaELFXxSX3R2jrigaxfKN/0Aglg+KDxkeojr3PxL1O59jptbp3aZqqsPeZDMrfqj5CW28JXPWp7qAXGqbWvlR+hCrZe4/QbTc7znGv1btTj7CNvc+0sPYR+mDs+xu71Ru4Trcz6N7J2NvY+70hK70vOBh7D7di+f/ucrdbsS3svd6S2Kcjz7PHIwdz3/9SrNOnTxdu7y6JvAp7r/1ddtGx9j7oYPQ9TPjdrus00ZzfbcLfwt6n0deefh2MfX9jdzq9ntNqd9p3MvY29j6Nvfmq7//M3tvrG9/480eG5j9dG4rVf72yvvEgI0R/DB/8F4+Tql7oTQAA
File diff suppressed because it is too large
@@ -732,7 +732,7 @@
 <common:external-id-relationship>part-of</common:external-id-relationship>
 </common:external-id>
 </common:external-ids>
-<work:work-summary put-code="0" visibility="private">
+<work:work-summary put-code="123456" visibility="private">
 <common:created-date>2001-12-31T12:00:00</common:created-date>
 <common:last-modified-date>2001-12-31T12:00:00</common:last-modified-date>
 <common:source>