added workflow parameters
This commit is contained in:
parent
eef60bb9f4
commit
4ae55e3891
|
@ -1,35 +1,30 @@
|
|||
package eu.dnetlib.doiboost.orcid;
|
||||
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
|
||||
|
||||
public class OrcidDSManager {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(OrcidDSManager.class);
|
||||
|
||||
private String hdfsServerUri;
|
||||
private String hadoopUsername;
|
||||
private String hdfsOrcidDefaultPath;
|
||||
private String summariesFileNameTarGz;
|
||||
private String outputAuthorsPath;
|
||||
|
||||
public static void main(String[] args) {
|
||||
public static void main(String[] args) throws IOException, Exception {
|
||||
logger.info("OrcidDSManager started");
|
||||
OrcidDSManager orcidDSManager = new OrcidDSManager();
|
||||
try {
|
||||
orcidDSManager.initGARRProperties();
|
||||
orcidDSManager.loadArgs(args);
|
||||
orcidDSManager.generateAuthors();
|
||||
} catch (Exception e) {
|
||||
logger.error("Generating authors data: "+e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
public void generateAuthors() throws Exception {
|
||||
|
@ -49,9 +44,6 @@ public class OrcidDSManager {
|
|||
// Because of Maven
|
||||
conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
|
||||
conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
|
||||
// Set HADOOP user
|
||||
System.setProperty("HADOOP_USER_NAME", hadoopUsername);
|
||||
System.setProperty("hadoop.home.dir", "/");
|
||||
return conf;
|
||||
}
|
||||
|
||||
|
@ -67,33 +59,17 @@ public class OrcidDSManager {
|
|||
return fs;
|
||||
}
|
||||
|
||||
private void loadProperties() throws FileNotFoundException, IOException {
|
||||
private void loadArgs(String[] args) throws IOException, Exception {
|
||||
final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(OrcidDSManager.class.getResourceAsStream("/eu/dnetlib/dhp/doiboost/create_orcid_authors_data.json")));
|
||||
parser.parseArgument(args);
|
||||
|
||||
Properties appProps = new Properties();
|
||||
ClassLoader classLoader = ClassLoader.getSystemClassLoader();
|
||||
appProps.load(classLoader.getResourceAsStream("orciddsmanager/props/app.properties"));
|
||||
hdfsServerUri = appProps.getProperty("hdfs.server.uri");
|
||||
hadoopUsername = appProps.getProperty("hdfs.hadoopusername");
|
||||
hdfsOrcidDefaultPath = appProps.getProperty("hdfs.orcid.defaultpath");
|
||||
summariesFileNameTarGz = appProps.getProperty("hdfs.orcid.summariesfilename.tar.gz");
|
||||
outputAuthorsPath = appProps.getProperty("hdfs.orcid.output.authorspath");
|
||||
}
|
||||
|
||||
private void initDefaultProperties() throws FileNotFoundException, IOException {
|
||||
|
||||
hdfsServerUri = "hdfs://localhost:9000";
|
||||
hadoopUsername = "enrico.ottonello";
|
||||
hdfsOrcidDefaultPath = "/user/enrico.ottonello/orcid/";
|
||||
summariesFileNameTarGz = "ORCID_2019_summaries.tar.gz";
|
||||
outputAuthorsPath = "output/";
|
||||
}
|
||||
|
||||
private void initGARRProperties() throws FileNotFoundException, IOException {
|
||||
|
||||
hdfsServerUri = "hdfs://hadoop-rm1.garr-pa1.d4science.org:8020";
|
||||
hadoopUsername = "root";
|
||||
hdfsOrcidDefaultPath = "/data/orcid_summaries/";
|
||||
summariesFileNameTarGz = "ORCID_2019_summaries.tar.gz";
|
||||
outputAuthorsPath = "output/";
|
||||
final String hdfsServerUri = parser.get("hdfsServerUri");
|
||||
logger.info("HDFS URI: "+hdfsServerUri);
|
||||
Path hdfsOrcidDefaultPath = new Path(parser.get("hdfsOrcidDefaultPath"));
|
||||
logger.info("Default Path: "+hdfsOrcidDefaultPath);
|
||||
final String summariesFileNameTarGz = parser.get("summariesFileNameTarGz");
|
||||
logger.info("Summaries File Name: "+summariesFileNameTarGz);
|
||||
final String outputAuthorsPath = parser.get("summariesFileNameTarGz");
|
||||
logger.info("Output Authors Data: "+outputAuthorsPath);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -122,14 +122,14 @@ public class SummariesDecompressor {
|
|||
}
|
||||
}
|
||||
|
||||
if ((counter % 1000) == 0) {
|
||||
if ((counter % 100000) == 0) {
|
||||
logger.info("Current xml records parsed: "+counter);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
logger.error("Parsing record from gzip archive: "+e.getMessage());
|
||||
e.printStackTrace();
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
logger.info("Summaries parse completed");
|
||||
logger.info("Total XML records parsed: "+counter);
|
||||
|
|
|
@ -0,0 +1,6 @@
|
|||
[
|
||||
{"paramName":"n", "paramLongName":"hdfsServerUri", "paramDescription": "the server uri", "paramRequired": true},
|
||||
{"paramName":"d", "paramLongName":"hdfsOrcidDefaultPath", "paramDescription": "the default work path", "paramRequired": true},
|
||||
{"paramName":"f", "paramLongName":"summariesFileNameTarGz", "paramDescription": "the name of the summaries orcid file", "paramRequired": true},
|
||||
{"paramName":"o", "paramLongName":"outputAuthorsPath", "paramDescription": "the relative folder of the sequential file to write", "paramRequired": true}
|
||||
]
|
|
@ -29,10 +29,10 @@
|
|||
<job-tracker>${jobTracker}</job-tracker>
|
||||
<name-node>${nameNode}</name-node>
|
||||
<main-class>eu.dnetlib.doiboost.orcid.OrcidDSManager</main-class>
|
||||
<!-- TODO (Enrico): fix the argument list below — the "-t .../crossref/index_dump" argument is leftover from the crossref workflow and is not declared in create_orcid_authors_data.json; only -n (hdfsServerUri), -d (hdfsOrcidDefaultPath), -f (summariesFileNameTarGz) and -o (outputAuthorsPath) are valid. -->
|
||||
<arg>-t</arg><arg>${workingPath}/input/crossref/index_dump</arg>
|
||||
<arg>-d</arg><arg>${workingPath}/</arg>
|
||||
<arg>-n</arg><arg>${nameNode}</arg>
|
||||
|
||||
<arg>-f</arg><arg>ORCID_2019_summaries.tar.gz</arg>
|
||||
<arg>-o</arg><arg>output/</arg>
|
||||
</java>
|
||||
<ok to="End"/>
|
||||
<error to="Kill"/>
|
||||
|
|
Loading…
Reference in New Issue