forked from D-Net/dnet-hadoop
refactoring
This commit is contained in:
parent 6ebc236657
commit 63d74ee379
@@ -20,9 +20,7 @@ import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 public class ExtractCrossrefRecords {
 	public static void main(String[] args) throws Exception {
-		String hdfsServerUri;
-		String workingPath;
-		String crossrefFileNameTarGz;
 		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
 			IOUtils
 				.toString(
@@ -30,11 +28,12 @@ public class ExtractCrossrefRecords {
 					.getResourceAsStream(
 						"/eu/dnetlib/dhp/doiboost/crossref_dump_reader.json")));
 		parser.parseArgument(args);
-		hdfsServerUri = parser.get("hdfsServerUri");
-		workingPath = parser.get("workingPath");
-		crossrefFileNameTarGz = parser.get("crossrefFileNameTarGz");
+		final String hdfsServerUri = parser.get("hdfsServerUri");
+		final String workingPath = parser.get("workingPath");
+		final String outputPath = parser.get("outputPath");
+		final String crossrefFileNameTarGz = parser.get("crossrefFileNameTarGz");
 
-		Path hdfsreadpath = new Path(hdfsServerUri.concat(workingPath).concat(crossrefFileNameTarGz));
+		Path hdfsreadpath = new Path(hdfsServerUri.concat(crossrefFileNameTarGz));
 		Configuration conf = new Configuration();
 		conf.set("fs.defaultFS", hdfsServerUri.concat(workingPath));
 		conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
@@ -45,11 +44,10 @@ public class ExtractCrossrefRecords {
 			new GzipCompressorInputStream(crossrefFileStream))) {
 			TarArchiveEntry entry = null;
 			while ((entry = tais.getNextTarEntry()) != null) {
-				if (entry.isDirectory()) {
-				} else {
+				if (!entry.isDirectory()) {
 					try (
 						FSDataOutputStream out = fs
-							.create(new Path(workingPath.concat("filess/").concat(entry.getName()).concat(".gz")));
+							.create(new Path(outputPath.concat(entry.getName()).concat(".gz")));
 						GZIPOutputStream gzipOs = new GZIPOutputStream(new BufferedOutputStream(out))) {
 
 						IOUtils.copy(tais, gzipOs);
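For context, the loop touched by this commit streams every regular entry of the Crossref tar.gz dump into its own gzipped output file. Below is a minimal local-filesystem sketch of that same pattern, assuming commons-compress for reading the tarball and java.util.zip for the per-entry output; the class name, argument handling, and flat-entry assumption are illustrative only, and the committed code writes to HDFS through FSDataOutputStream rather than to local files.

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.zip.GZIPOutputStream;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.apache.commons.compress.utils.IOUtils;

// Illustrative sketch only: re-compress each regular entry of a .tar.gz as its own .gz file.
public class TarGzToGzFilesSketch {
	public static void main(String[] args) throws Exception {
		final String tarGzPath = args[0]; // path to the tarball, e.g. the Crossref dump
		final String outputDir = args[1]; // existing directory, must end with '/'

		try (TarArchiveInputStream tais = new TarArchiveInputStream(
			new GzipCompressorInputStream(new BufferedInputStream(new FileInputStream(tarGzPath))))) {
			TarArchiveEntry entry;
			while ((entry = tais.getNextTarEntry()) != null) {
				// Skip directory entries; assumes the remaining entries are flat (no nested paths).
				if (!entry.isDirectory()) {
					try (GZIPOutputStream gzipOs = new GZIPOutputStream(
						new BufferedOutputStream(new FileOutputStream(outputDir + entry.getName() + ".gz")))) {
						// The tar stream is positioned at the current entry, so copying it
						// writes exactly that entry's bytes into the gzipped output.
						IOUtils.copy(tais, gzipOs);
					}
				}
			}
		}
	}
}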