fixed missing parameter on download update

This commit is contained in:
sandro.labruzzo 2024-01-12 16:18:20 +01:00
parent 859babf722
commit e328bc0ade
5 changed files with 259 additions and 259 deletions

ORCIDWorker.java

@ -1,6 +1,14 @@
package eu.dnetlib.dhp.collection.orcid;

import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.concurrent.BlockingQueue;

import javax.swing.*;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.io.SequenceFile;
@ -10,233 +18,225 @@ import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.common.collection.HttpClientParams;

public class ORCIDWorker extends Thread {

    final static Logger log = LoggerFactory.getLogger(ORCIDWorker.class);

    public static String JOB_COMPLETE = "JOB_COMPLETE";

    private static final String userAgent = "Mozilla/5.0 (compatible; OAI; +http://www.openaire.eu)";

    private final BlockingQueue<String> queue;

    private boolean hasComplete = false;

    private final SequenceFile.Writer employments;

    private final SequenceFile.Writer summary;
    private final SequenceFile.Writer works;

    private final String token;

    private final String id;

    public static ORCIDWorkerBuilder builder() {
        return new ORCIDWorkerBuilder();
    }

    public ORCIDWorker(String id, BlockingQueue<String> myqueue, SequenceFile.Writer employments,
        SequenceFile.Writer summary, SequenceFile.Writer works, String token) {
        this.id = id;
        this.queue = myqueue;
        this.employments = employments;
        this.summary = summary;
        this.works = works;
        this.token = token;
    }

    public static String retrieveURL(final String id, final String apiUrl, String token) {
        try {
            final HttpURLConnection urlConn = getHttpURLConnection(apiUrl, token);
            if (urlConn.getResponseCode() > 199 && urlConn.getResponseCode() < 300) {
                InputStream input = urlConn.getInputStream();
                return IOUtils.toString(input);
            } else {
                log
                    .error(
                        "Thread {} UNABLE TO DOWNLOAD FROM THIS URL {} , status code {}", id, apiUrl,
                        urlConn.getResponseCode());
            }
        } catch (Exception e) {
            log.error("Thread {} Error on retrieving URL {} {}", id, apiUrl, e);
        }
        return null;
    }

    @NotNull
    private static HttpURLConnection getHttpURLConnection(String apiUrl, String token) throws IOException {
        final HttpURLConnection urlConn = (HttpURLConnection) new URL(apiUrl).openConnection();
        final HttpClientParams clientParams = new HttpClientParams();
        urlConn.setInstanceFollowRedirects(false);
        urlConn.setReadTimeout(clientParams.getReadTimeOut() * 1000);
        urlConn.setConnectTimeout(clientParams.getConnectTimeOut() * 1000);
        urlConn.addRequestProperty(HttpHeaders.USER_AGENT, userAgent);
        urlConn.addRequestProperty(HttpHeaders.AUTHORIZATION, String.format("Bearer %s", token));
        return urlConn;
    }

    private static String generateSummaryURL(final String orcidId) {
        return "https://api.orcid.org/v3.0/" + orcidId + "/record";
    }

    private static String generateWorksURL(final String orcidId) {
        return "https://api.orcid.org/v3.0/" + orcidId + "/works";
    }

    private static String generateEmploymentsURL(final String orcidId) {
        return "https://api.orcid.org/v3.0/" + orcidId + "/employments";
    }

    private static void writeResultToSequenceFile(String id, String url, String token, String orcidId,
        SequenceFile.Writer file) throws IOException {
        final String response = retrieveURL(id, url, token);
        if (response != null) {
            if (orcidId == null || response == null) {
                log.error("Thread {} {} {}", id, orcidId, response);
                throw new RuntimeException("null items ");
            }
            if (file == null) {
                log.error("Thread {} file is null for {} URL:{}", id, url, orcidId);
            } else
                file.append(new Text(orcidId), new Text(response));
        }
    }

    @Override
    public void run() {
        final Text key = new Text();
        final Text value = new Text();
        long start;
        long total_time;
        String orcidId = "";
        int requests = 0;
        if (summary == null || employments == null || works == null)
            throw new RuntimeException("Null files");

        while (!hasComplete) {
            try {
                orcidId = queue.take();
                if (orcidId.equalsIgnoreCase(JOB_COMPLETE)) {
                    queue.put(orcidId);
                    hasComplete = true;
                } else {
                    start = System.currentTimeMillis();
                    writeResultToSequenceFile(id, generateSummaryURL(orcidId), token, orcidId, summary);
                    total_time = System.currentTimeMillis() - start;
                    requests++;
                    if (total_time < 1000) {
                        // I know making a sleep on a thread is bad, but we need to stay to 24 requests per seconds,
                        // hence
                        // the time between two http request in a thread must be 1 second
                        Thread.sleep(1000L - total_time);
                    }
                    start = System.currentTimeMillis();
                    writeResultToSequenceFile(id, generateWorksURL(orcidId), token, orcidId, works);
                    total_time = System.currentTimeMillis() - start;
                    requests++;
                    if (total_time < 1000) {
                        // I know making a sleep on a thread is bad, but we need to stay to 24 requests per seconds,
                        // hence
                        // the time between two http request in a thread must be 1 second
                        Thread.sleep(1000L - total_time);
                    }
                    start = System.currentTimeMillis();
                    writeResultToSequenceFile(id, generateEmploymentsURL(orcidId), token, orcidId, employments);
                    total_time = System.currentTimeMillis() - start;
                    requests++;
                    if (total_time < 1000) {
                        // I know making a sleep on a thread is bad, but we need to stay to 24 requests per seconds,
                        // hence
                        // the time between two http request in a thread must be 1 second
                        Thread.sleep(1000L - total_time);
                    }
                    if (requests % 30 == 0) {
                        log.info("Thread {} Downloaded {}", id, requests);
                    }
                }
            } catch (Throwable e) {
                log.error("Thread {} Unable to save ORICD: {} item error", id, orcidId, e);
            }
        }
        try {
            works.close();
            summary.close();
            employments.close();
        } catch (Throwable e) {
            throw new RuntimeException(e);
        }

        log.info("Thread {} COMPLETE ", id);
        log.info("Thread {} Downloaded {}", id, requests);
    }

    public static class ORCIDWorkerBuilder {

        private String id;
        private SequenceFile.Writer employments;
        private SequenceFile.Writer summary;
        private SequenceFile.Writer works;
        private BlockingQueue<String> queue;
        private String token;

        public ORCIDWorkerBuilder withId(final String id) {
            this.id = id;
            return this;
        }

        public ORCIDWorkerBuilder withEmployments(final SequenceFile.Writer sequenceFile) {
            this.employments = sequenceFile;
            return this;
        }

        public ORCIDWorkerBuilder withSummary(final SequenceFile.Writer sequenceFile) {
            this.summary = sequenceFile;
            return this;
        }

        public ORCIDWorkerBuilder withWorks(final SequenceFile.Writer sequenceFile) {
            this.works = sequenceFile;
            return this;
        }

        public ORCIDWorkerBuilder withAccessToken(final String accessToken) {
            this.token = accessToken;
            return this;
        }

        public ORCIDWorkerBuilder withBlockingQueue(final BlockingQueue<String> queue) {
            this.queue = queue;
            return this;
        }

        public ORCIDWorker build() {
            if (this.summary == null || this.works == null || this.employments == null || StringUtils.isEmpty(token)
                || queue == null)
                throw new RuntimeException("Unable to build missing required params");
            return new ORCIDWorker(id, queue, employments, summary, works, token);
        }
    }
}
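For reference, a minimal sketch of how one worker could be driven through the builder above, assuming the caller has already opened the three SequenceFile writers (for instance via OrcidGetUpdatesFile.createFile) and holds a valid ORCID member API token. The class SingleWorkerDriver and the sample ORCID iD are illustrative only, not part of this commit:

package eu.dnetlib.dhp.collection.orcid; // same package as ORCIDWorker, for illustration only

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

import org.apache.hadoop.io.SequenceFile;

// Hypothetical driver: only illustrates the builder contract and the JOB_COMPLETE sentinel.
public class SingleWorkerDriver {

    public static void runOne(SequenceFile.Writer employments, SequenceFile.Writer summary,
        SequenceFile.Writer works, String accessToken) throws Exception {

        // The worker drains the queue until it meets JOB_COMPLETE, which it puts back
        // so that any sibling workers sharing the queue terminate as well.
        BlockingQueue<String> queue = new ArrayBlockingQueue<>(10);
        queue.put("0000-0002-1825-0097"); // sample ORCID iD (placeholder)
        queue.put(ORCIDWorker.JOB_COMPLETE);

        ORCIDWorker worker = ORCIDWorker
            .builder()
            .withId("0")
            .withEmployments(employments)
            .withSummary(summary)
            .withWorks(works)
            .withAccessToken(accessToken)
            .withBlockingQueue(queue)
            .build();

        worker.start();
        worker.join(); // the worker closes the three writers before exiting
    }
}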

OrcidGetUpdatesFile.java

@ -1,15 +1,17 @@
package eu.dnetlib.dhp.collection.orcid;

import static eu.dnetlib.dhp.utils.DHPUtils.getHadoopConfiguration;

import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
@ -23,96 +25,108 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.collection.HttpClientParams;

public class OrcidGetUpdatesFile {

    private static Logger log = LoggerFactory.getLogger(OrcidGetUpdatesFile.class);

    public static void main(String[] args) throws Exception {

        ArgumentApplicationParser parser = new ArgumentApplicationParser(
            IOUtils
                .toString(
                    Objects
                        .requireNonNull(
                            OrcidGetUpdatesFile.class
                                .getResourceAsStream(
                                    "/eu/dnetlib/dhp/collection/orcid/download_orcid_update_parameter.json")))

        );
        parser.parseArgument(args);

        final String namenode = parser.get("namenode");
        log.info("got variable namenode: {}", namenode);

        final String targetPath = parser.get("targetPath");
        log.info("got variable targetPath: {}", targetPath);

        final String apiURL = parser.get("apiURL");
        log.info("got variable apiURL: {}", apiURL);

        final String accessToken = parser.get("accessToken");
        log.info("got variable accessToken: {}", accessToken);

        final FileSystem fileSystem = FileSystem.get(getHadoopConfiguration(namenode));

        new OrcidGetUpdatesFile().readTar(fileSystem, accessToken, apiURL, targetPath, "2023-09-30");

    }

    private SequenceFile.Writer createFile(Path aPath, FileSystem fileSystem) throws IOException {
        return SequenceFile
            .createWriter(
                fileSystem.getConf(),
                SequenceFile.Writer.file(aPath),
                SequenceFile.Writer.keyClass(Text.class),
                SequenceFile.Writer.valueClass(Text.class));
    }

    private ORCIDWorker createWorker(final String id, final String targetPath, final BlockingQueue<String> queue,
        final String accessToken, FileSystem fileSystem) throws Exception {
        return ORCIDWorker
            .builder()
            .withId(id)
            .withEmployments(createFile(new Path(String.format("%s/employments_%s", targetPath, id)), fileSystem))
            .withSummary(createFile(new Path(String.format("%s/summary_%s", targetPath, id)), fileSystem))
            .withWorks(createFile(new Path(String.format("%s/works_%s", targetPath, id)), fileSystem))
            .withAccessToken(accessToken)
            .withBlockingQueue(queue)
            .build();
    }

    public void readTar(FileSystem fileSystem, final String accessToken, final String apiURL, final String targetPath,
        final String startDate) throws Exception {

        final HttpURLConnection urlConn = (HttpURLConnection) new URL(apiURL).openConnection();
        final HttpClientParams clientParams = new HttpClientParams();
        urlConn.setInstanceFollowRedirects(false);
        urlConn.setReadTimeout(clientParams.getReadTimeOut() * 1000);
        urlConn.setConnectTimeout(clientParams.getConnectTimeOut() * 1000);
        if (urlConn.getResponseCode() > 199 && urlConn.getResponseCode() < 300) {

            InputStream input = urlConn.getInputStream();
            TarArchiveInputStream tais = new TarArchiveInputStream(new GzipCompressorInputStream(
                new BufferedInputStream(
                    input)));
            TarArchiveEntry entry;
            BlockingQueue<String> queue = new ArrayBlockingQueue<String>(3000);
            final List<ORCIDWorker> workers = new ArrayList<>();
            for (int i = 0; i < 22; i++) {
                workers.add(createWorker("" + i, targetPath, queue, accessToken, fileSystem));
            }
            workers.forEach(Thread::start);

            while ((entry = tais.getNextTarEntry()) != null) {

                if (entry.isFile()) {

                    BufferedReader br = new BufferedReader(new InputStreamReader(tais));
                    System.out.println(br.readLine());
                    br
                        .lines()
                        .map(l -> l.split(","))
                        .filter(s -> StringUtils.compare(s[3].substring(0, 10), startDate) > 0)
                        .map(s -> s[0])
                        .forEach(s -> {
                            try {
                                log.info("Adding item ");
                                queue.put(s);
                            } catch (InterruptedException e) {
                                throw new RuntimeException(e);
                            }
                        });
                    queue.put(ORCIDWorker.JOB_COMPLETE);
                }
@ -122,12 +136,7 @@ public class OrcidGetUpdatesFile {
                worker.join();
            }
        }
}
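The actual fix in this class is that the parameter specification is now read from the classpath and parser.parseArgument(args) is invoked before the values are used; in addition the worker pool grows from 20 to 22 threads and the 200-row cap on queued identifiers is dropped, so every record modified after startDate is downloaded. With each worker pausing to at most one request per second, 22 workers stay around 22 requests per second, under the 24 requests-per-second budget noted in ORCIDWorker. Mirroring the commented-out test further down, readTar can also be exercised against a local file:// filesystem; a hedged sketch follows, where the token, target path and start date are placeholders (the tar URL is the one configured in the workflow parameters):

package eu.dnetlib.dhp.collection.orcid; // same package, for illustration only

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

// Hypothetical local smoke run of readTar; not part of the commit.
public class LocalReadTarExample {

    public static void main(String[] args) throws Exception {
        // Use the local filesystem instead of HDFS so the SequenceFiles land under /tmp/orcid.
        FileSystem fs = FileSystem.get(URI.create("file:///"), new Configuration());

        new OrcidGetUpdatesFile()
            .readTar(
                fs,
                "<orcid-member-api-token>", // placeholder access token
                "http://74804fb637bd8e2fba5b-e0a029c2f87486cddec3b416996a6057.r3.cf1.rackcdn.com/last_modified.csv.tar",
                "file:///tmp/orcid", // placeholder target path
                "2023-09-30"); // only rows with a last-modified date after this one are queued
    }
}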

download_orcid_update_parameter.json

@ -1,5 +1,4 @@
[ {
  "paramName": "n",
  "paramLongName": "namenode",
  "paramDescription": "the Name Node URI",
@ -23,5 +22,4 @@
  "paramDescription": "the accessToken to contact API",
  "paramRequired": true
}
]
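These definitions are what main loads and passes to parseArgument, so the job is launched with the corresponding long options; a sketch of an invocation follows, where every value is a placeholder (only the option names come from this file and from the parser.get calls above):

package eu.dnetlib.dhp.collection.orcid; // same package, for illustration only

// Hypothetical launcher; cluster address, paths and token are placeholders.
public class OrcidGetUpdatesFileLauncher {

    public static void main(String[] args) throws Exception {
        OrcidGetUpdatesFile
            .main(new String[] {
                "--namenode", "hdfs://nameservice1",
                "--targetPath", "/data/orcid/update",
                "--apiURL", "http://example.org/last_modified.csv.tar",
                "--accessToken", "<orcid-member-api-token>"
            });
    }
}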

View File

@ -6,12 +6,12 @@
    </property>
    <property>
        <name>apiURL</name>
        <value>http://74804fb637bd8e2fba5b-e0a029c2f87486cddec3b416996a6057.r3.cf1.rackcdn.com/last_modified.csv.tar</value>
        <description>The URL of the update CSV list </description>
    </property>
    <property>
        <name>accessToken</name>
        <description>The access token</description>
    </property>
</parameters>

DownloadORCIDTest.java

@ -124,16 +124,9 @@ public class DownloadORCIDTest {
//    @Test
//    public void testReadTar() throws Exception {
//        OrcidGetUpdatesFile.main(new String[] {
//            "--namenode", "puppa"
//        });
//
//    }