package eu.dnetlib.doiboost.orcid;

import java.io.IOException;
import java.net.URI;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
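
/**
 * Handles the import of ORCID author summaries into HDFS: it parses the job
 * arguments (defined in create_orcid_authors_data.json) and extracts the
 * summaries tar.gz archive into a sequence file of author data.
 * Expected arguments: hdfsServerUri, hdfsOrcidDefaultPath,
 * summariesFileNameTarGz, outputAuthorsPath.
 */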
public class OrcidDSManager {

    private static final Logger logger = LoggerFactory.getLogger(OrcidDSManager.class);

    private String hdfsServerUri;
    private String hdfsOrcidDefaultPath;
    private String summariesFileNameTarGz;
    private String outputAuthorsPath;
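
    /**
     * Entry point: loads the job arguments, then generates the authors
     * sequence file from the ORCID summaries archive.
     */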
    public static void main(String[] args) throws Exception {
        logger.info("OrcidDSManager started");
        OrcidDSManager orcidDSManager = new OrcidDSManager();
        orcidDSManager.loadArgs(args);
        orcidDSManager.generateAuthors();
    }
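
    /**
     * Parses the ORCID summaries tar.gz archive found on HDFS and writes the
     * extracted author data to a timestamped authors.seq sequence file.
     */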
    public void generateAuthors() throws Exception {
        Configuration conf = initConfigurationObject();
        FileSystem fs = initFileSystemObject(conf);
        String tarGzUri = hdfsServerUri.concat(hdfsOrcidDefaultPath).concat(summariesFileNameTarGz);
        logger.info("Started parsing " + tarGzUri);
        Path outputPath = new Path(
            hdfsServerUri
                .concat(hdfsOrcidDefaultPath)
                .concat(outputAuthorsPath)
                .concat(Long.toString(System.currentTimeMillis()))
                .concat("/authors.seq"));
        SummariesDecompressor.parseGzSummaries(conf, tarGzUri, outputPath);
    }
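
    /**
     * Builds the Hadoop configuration pointing at the HDFS ORCID path.
     */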
    private Configuration initConfigurationObject() {
        // Init HDFS File System Object
        Configuration conf = new Configuration();
        // Set FileSystem URI
        conf.set("fs.defaultFS", hdfsServerUri.concat(hdfsOrcidDefaultPath));
        // Set the FileSystem implementations explicitly: uber-jars built with
        // Maven can lose the service definitions that register the hdfs:// and
        // file:// schemes
        conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
        conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
        return conf;
    }
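
    /**
     * Opens the HDFS file system for the configured server URI and path.
     */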
    private FileSystem initFileSystemObject(Configuration conf) throws IOException {
        // Get the filesystem - HDFS; let failures propagate instead of
        // swallowing the exception and returning null
        return FileSystem.get(URI.create(hdfsServerUri.concat(hdfsOrcidDefaultPath)), conf);
    }
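
    /**
     * Parses the command line arguments against the spec in
     * create_orcid_authors_data.json and stores them in the instance fields.
     */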
    private void loadArgs(String[] args) throws Exception {
        final ArgumentApplicationParser parser = new ArgumentApplicationParser(
            IOUtils.toString(
                OrcidDSManager.class
                    .getResourceAsStream("/eu/dnetlib/dhp/doiboost/create_orcid_authors_data.json")));
        parser.parseArgument(args);

        hdfsServerUri = parser.get("hdfsServerUri");
        logger.info("HDFS URI: " + hdfsServerUri);
        hdfsOrcidDefaultPath = parser.get("hdfsOrcidDefaultPath");
        logger.info("Default Path: " + hdfsOrcidDefaultPath);
        summariesFileNameTarGz = parser.get("summariesFileNameTarGz");
        logger.info("Summaries File Name: " + summariesFileNameTarGz);
        outputAuthorsPath = parser.get("outputAuthorsPath");
        logger.info("Output Authors Data: " + outputAuthorsPath);
    }
}