forked from D-Net/dnet-hadoop
Merge branch 'doiboost' of code-repo.d4science.org:D-Net/dnet-hadoop into doiboost
commit 7029942e06
eu/dnetlib/doiboost/orcid/OrcidDSManager.java
@@ -1,35 +1,30 @@
 package eu.dnetlib.doiboost.orcid;
 
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.URI;
-import java.util.Properties;
+import org.apache.commons.io.IOUtils;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+
 public class OrcidDSManager {
 
     private static final Logger logger = LoggerFactory.getLogger(OrcidDSManager.class);
 
     private String hdfsServerUri;
-    private String hadoopUsername;
     private String hdfsOrcidDefaultPath;
     private String summariesFileNameTarGz;
     private String outputAuthorsPath;
 
-    public static void main(String[] args) {
+    public static void main(String[] args) throws IOException, Exception {
         logger.info("OrcidDSManager started");
         OrcidDSManager orcidDSManager = new OrcidDSManager();
-        try {
-            orcidDSManager.initGARRProperties();
-            orcidDSManager.generateAuthors();
-        } catch (Exception e) {
-            logger.error("Generating authors data: "+e.getMessage());
-        }
+        orcidDSManager.loadArgs(args);
+        orcidDSManager.generateAuthors();
     }
 
     public void generateAuthors() throws Exception {
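Reviewer note: the reworked entry point no longer swallows failures in a try/catch; exceptions now propagate and fail the launcher. A minimal sketch of invoking it directly — the flag names (-n, -d, -f, -o) come from create_orcid_authors_data.json below, while the values are illustrative examples, not taken from this commit:

// Sketch only: direct invocation of the new main().
public class OrcidDSManagerExample {
    public static void main(String[] ignored) throws Exception {
        String[] args = {
            "-n", "hdfs://localhost:9000",       // hdfsServerUri
            "-d", "/data/orcid_summaries/",      // hdfsOrcidDefaultPath
            "-f", "ORCID_2019_summaries.tar.gz", // summariesFileNameTarGz
            "-o", "output/"                      // outputAuthorsPath
        };
        eu.dnetlib.doiboost.orcid.OrcidDSManager.main(args);
    }
}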
@@ -37,7 +32,7 @@ public class OrcidDSManager {
         FileSystem fs = initFileSystemObject(conf);
         String tarGzUri = hdfsServerUri.concat(hdfsOrcidDefaultPath).concat(summariesFileNameTarGz);
         logger.info("Started parsing "+tarGzUri);
-        Path outputPath = new Path(hdfsServerUri.concat(hdfsOrcidDefaultPath).concat(outputAuthorsPath).concat(Long.toString(System.currentTimeMillis())).concat("/authors_part"));
+        Path outputPath = new Path(hdfsServerUri.concat(hdfsOrcidDefaultPath).concat(outputAuthorsPath).concat(Long.toString(System.currentTimeMillis())).concat("/authors.seq"));
         SummariesDecompressor.parseGzSummaries(conf, tarGzUri, outputPath);
     }
 
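Reviewer note: the pieces are joined with plain String.concat, so hdfsOrcidDefaultPath and outputAuthorsPath must keep their trailing slashes. An alternative sketch using Hadoop's Path to make the joins explicit (not what the commit does; same field values assumed):

// Sketch: the same timestamped output location built with
// org.apache.hadoop.fs.Path instead of String.concat.
Path base = new Path(hdfsServerUri.concat(hdfsOrcidDefaultPath), outputAuthorsPath);
Path runDir = new Path(base, Long.toString(System.currentTimeMillis()));
Path outputPath = new Path(runDir, "authors.seq");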
@@ -49,9 +44,6 @@
         // Because of Maven
         conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
         conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
-        // Set HADOOP user
-        System.setProperty("HADOOP_USER_NAME", hadoopUsername);
-        System.setProperty("hadoop.home.dir", "/");
         return conf;
     }
 
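Reviewer note: the explicit fs.hdfs.impl / fs.file.impl settings pin the FileSystem implementations, a common workaround for shaded Maven jars that merge away the META-INF/services registrations ("Because of Maven"). initFileSystemObject itself is not part of this hunk; given the java.net.URI import and the "return fs;" context below, a plausible shape is the following sketch (an assumption, not the commit's code):

// Hypothetical sketch of initFileSystemObject, inferred from the
// surrounding context; the real method body is not shown in this diff.
private FileSystem initFileSystemObject(Configuration conf) throws IOException {
    return FileSystem.get(URI.create(hdfsServerUri.concat(hdfsOrcidDefaultPath)), conf);
}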
@@ -67,33 +59,17 @@ public class OrcidDSManager {
         return fs;
     }
 
-    private void loadProperties() throws FileNotFoundException, IOException {
-
-        Properties appProps = new Properties();
-        ClassLoader classLoader = ClassLoader.getSystemClassLoader();
-        appProps.load(classLoader.getResourceAsStream("orciddsmanager/props/app.properties"));
-        hdfsServerUri = appProps.getProperty("hdfs.server.uri");
-        hadoopUsername = appProps.getProperty("hdfs.hadoopusername");
-        hdfsOrcidDefaultPath = appProps.getProperty("hdfs.orcid.defaultpath");
-        summariesFileNameTarGz = appProps.getProperty("hdfs.orcid.summariesfilename.tar.gz");
-        outputAuthorsPath = appProps.getProperty("hdfs.orcid.output.authorspath");
-    }
-
-    private void initDefaultProperties() throws FileNotFoundException, IOException {
-
-        hdfsServerUri = "hdfs://localhost:9000";
-        hadoopUsername = "enrico.ottonello";
-        hdfsOrcidDefaultPath = "/user/enrico.ottonello/orcid/";
-        summariesFileNameTarGz = "ORCID_2019_summaries.tar.gz";
-        outputAuthorsPath = "output/";
-    }
-
-    private void initGARRProperties() throws FileNotFoundException, IOException {
-
-        hdfsServerUri = "hdfs://hadoop-rm1.garr-pa1.d4science.org:8020";
-        hadoopUsername = "root";
-        hdfsOrcidDefaultPath = "/data/orcid_summaries/";
-        summariesFileNameTarGz = "ORCID_2019_summaries.tar.gz";
-        outputAuthorsPath = "output/";
-    }
+    private void loadArgs(String[] args) throws IOException, Exception {
+        final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(OrcidDSManager.class.getResourceAsStream("/eu/dnetlib/dhp/doiboost/create_orcid_authors_data.json")));
+        parser.parseArgument(args);
+
+        hdfsServerUri = parser.get("hdfsServerUri");
+        logger.info("HDFS URI: "+hdfsServerUri);
+        hdfsOrcidDefaultPath = parser.get("hdfsOrcidDefaultPath");
+        logger.info("Default Path: "+hdfsOrcidDefaultPath);
+        summariesFileNameTarGz = parser.get("summariesFileNameTarGz");
+        logger.info("Summaries File Name: "+summariesFileNameTarGz);
+        outputAuthorsPath = parser.get("outputAuthorsPath");
+        logger.info("Output Authors Data: "+outputAuthorsPath);
+    }
 }
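Reviewer note: the parser keys are plain strings, so a mismatch between loadArgs and the JSON spec only surfaces at runtime. A minimal guard, assuming ArgumentApplicationParser.get returns null for an absent parameter (an assumption; only the constructor, parseArgument, and get calls above are confirmed by this diff):

// Sketch: verify every long parameter name used by loadArgs resolves
// after parsing a known-good argument vector (values illustrative).
String jsonSpec = IOUtils.toString(OrcidDSManager.class.getResourceAsStream(
        "/eu/dnetlib/dhp/doiboost/create_orcid_authors_data.json"));
String[] args = {"-n", "hdfs://localhost:9000", "-d", "/data/orcid_summaries/",
        "-f", "ORCID_2019_summaries.tar.gz", "-o", "output/"};
ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonSpec);
parser.parseArgument(args);
for (String key : new String[] {"hdfsServerUri", "hdfsOrcidDefaultPath",
        "summariesFileNameTarGz", "outputAuthorsPath"}) {
    if (parser.get(key) == null) {
        throw new IllegalStateException("missing parser key: " + key);
    }
}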
eu/dnetlib/doiboost/orcid/SummariesDecompressor.java
@@ -122,14 +122,14 @@ public class SummariesDecompressor {
                     }
                 }
 
-                if ((counter % 1000) == 0) {
+                if ((counter % 100000) == 0) {
                     logger.info("Current xml records parsed: "+counter);
                 }
             }
         }
     } catch (IOException e) {
         logger.error("Parsing record from gzip archive: "+e.getMessage());
-        e.printStackTrace();
+        throw new RuntimeException(e);
     }
     logger.info("Summaries parse completed");
     logger.info("Total XML records parsed: "+counter);
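Reviewer note: the renamed authors.seq output is, per the parameter description in the new JSON file, a sequence file. A sketch of writing one with org.apache.hadoop.io.SequenceFile and Text — the key/value types and record content are assumptions, since the diff does not show SummariesDecompressor's actual writer setup:

// Hypothetical sketch: writing a Hadoop SequenceFile like authors.seq.
// Requires org.apache.hadoop.io.SequenceFile and org.apache.hadoop.io.Text.
try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
        SequenceFile.Writer.file(outputPath),
        SequenceFile.Writer.keyClass(Text.class),
        SequenceFile.Writer.valueClass(Text.class))) {
    writer.append(new Text("0000-0001-2345-678X"), new Text("<record>...</record>"));
}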
eu/dnetlib/dhp/doiboost/create_orcid_authors_data.json (new file)
@@ -0,0 +1,6 @@
+[
+    {"paramName":"n", "paramLongName":"hdfsServerUri", "paramDescription": "the server uri", "paramRequired": true},
+    {"paramName":"d", "paramLongName":"hdfsOrcidDefaultPath", "paramDescription": "the default work path", "paramRequired": true},
+    {"paramName":"f", "paramLongName":"summariesFileNameTarGz", "paramDescription": "the name of the ORCID summaries file", "paramRequired": true},
+    {"paramName":"o", "paramLongName":"outputAuthorsPath", "paramDescription": "the relative folder of the sequential file to write", "paramRequired": true}
+]
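Reviewer note: each entry binds a short flag (paramName) to a long name (paramLongName) that the Java code reads back via parser.get. Purely to illustrate the file's shape, a hypothetical Jackson read of the spec — not how ArgumentApplicationParser actually consumes it, which this commit does not show:

// Hypothetical: load the spec into generic maps with Jackson,
// only to show its structure.
ObjectMapper mapper = new ObjectMapper();
List<Map<String, Object>> params = mapper.readValue(
        OrcidDSManager.class.getResourceAsStream(
                "/eu/dnetlib/dhp/doiboost/create_orcid_authors_data.json"),
        new TypeReference<List<Map<String, Object>>>() {});
for (Map<String, Object> p : params) {
    System.out.println(p.get("paramName") + " -> " + p.get("paramLongName"));
}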
Oozie workflow definition (file path not shown in this excerpt)
@@ -29,10 +29,10 @@
     <job-tracker>${jobTracker}</job-tracker>
     <name-node>${nameNode}</name-node>
     <main-class>eu.dnetlib.doiboost.orcid.OrcidDSManager</main-class>
-    <!-- ENRICO FILL THE RIGHT ARG SECTION -->
-    <arg>-t</arg><arg>${workingPath}/input/crossref/index_dump</arg>
+    <arg>-d</arg><arg>${workingPath}/</arg>
     <arg>-n</arg><arg>${nameNode}</arg>
+    <arg>-f</arg><arg>ORCID_2019_summaries.tar.gz</arg>
+    <arg>-o</arg><arg>output/</arg>
 </java>
 <ok to="End"/>
 <error to="Kill"/>
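Reviewer note: the trailing slashes on the -d and -o values matter, since generateAuthors joins the pieces with String.concat; without them the path segments would fuse. With these arguments the output URI resolves roughly as in this sketch (the resolved ${nameNode} and ${workingPath} values are illustrative):

// Illustration of how the workflow arguments flow into generateAuthors.
String outputUri = "hdfs://nameservice1"               // -n  ${nameNode} (example)
        .concat("/data/doiboost/")                     // -d  ${workingPath}/ (example)
        .concat("output/")                             // -o
        .concat(Long.toString(System.currentTimeMillis()))
        .concat("/authors.seq");
// e.g. hdfs://nameservice1/data/doiboost/output/1590000000000/authors.seq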