forked from D-Net/dnet-hadoop

commit 9c7e66c07f (parent b113586207)
eu/dnetlib/dhp/common/collection/DecompressTarGz.java (new file)

@@ -0,0 +1,40 @@
+
+package eu.dnetlib.dhp.common.collection;
+
+import java.io.BufferedOutputStream;
+import java.io.IOException;
+import java.util.zip.GZIPOutputStream;
+
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
+import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+public class DecompressTarGz {
+
+	public static void doExtract(FileSystem fs, String outputPath, String tarGzPath) throws IOException {
+
+		FSDataInputStream inputFileStream = fs.open(new Path(tarGzPath));
+		try (TarArchiveInputStream tais = new TarArchiveInputStream(
+			new GzipCompressorInputStream(inputFileStream))) {
+			TarArchiveEntry entry = null;
+			while ((entry = tais.getNextTarEntry()) != null) {
+				if (!entry.isDirectory()) {
+					try (
+						FSDataOutputStream out = fs
+							.create(new Path(outputPath.concat(entry.getName()).concat(".gz")));
+						GZIPOutputStream gzipOs = new GZIPOutputStream(new BufferedOutputStream(out))) {
+
+						IOUtils.copy(tais, gzipOs);
+
+					}
+
+				}
+			}
+		}
+	}
+}
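For orientation (not part of the commit), a minimal sketch of how a caller might exercise doExtract against the local filesystem; the file:/// URI and both paths are illustrative:

	package example;

	import java.net.URI;

	import org.apache.hadoop.conf.Configuration;
	import org.apache.hadoop.fs.FileSystem;

	import eu.dnetlib.dhp.common.collection.DecompressTarGz;

	public class DecompressTarGzExample {

		public static void main(String[] args) throws Exception {
			// Local filesystem for a quick smoke test; on a cluster,
			// fs.defaultFS would point at the HDFS name node instead.
			Configuration conf = new Configuration();
			FileSystem fs = FileSystem.get(URI.create("file:///"), conf);

			// Re-compresses every regular tar entry as <outputPath><entryName>.gz.
			// outputPath is joined by plain string concatenation, so the
			// trailing slash matters.
			DecompressTarGz.doExtract(fs, "/tmp/extracted/", "/tmp/dump.tar.gz");
		}
	}

Note that each entry is re-gzipped individually rather than written as a plain file, so the extracted dump stays compressed for the downstream Hadoop jobs.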
eu/dnetlib/doiboost/crossref/ExtractCrossrefRecords.java

@@ -1,19 +1,13 @@
 
 package eu.dnetlib.doiboost.crossref;
 
-import java.io.BufferedOutputStream;
-import java.net.URI;
-import java.util.zip.GZIPOutputStream;
+import static eu.dnetlib.dhp.common.collection.DecompressTarGz.doExtract;
+
+import java.net.URI;
 
-import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
-import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
-import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.mortbay.log.Log;
 
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
@@ -33,31 +27,16 @@ public class ExtractCrossrefRecords {
 		final String outputPath = parser.get("outputPath");
 		final String crossrefFileNameTarGz = parser.get("crossrefFileNameTarGz");
 
-		Path hdfsreadpath = new Path(workingPath.concat("/").concat(crossrefFileNameTarGz));
 		Configuration conf = new Configuration();
 		conf.set("fs.defaultFS", workingPath);
 		conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
 		conf.set("fs.file.impl", org.apache.hadoop.fs.LocalFileSystem.class.getName());
 		FileSystem fs = FileSystem.get(URI.create(workingPath), conf);
-		FSDataInputStream crossrefFileStream = fs.open(hdfsreadpath);
-		try (TarArchiveInputStream tais = new TarArchiveInputStream(
-			new GzipCompressorInputStream(crossrefFileStream))) {
-			TarArchiveEntry entry = null;
-			while ((entry = tais.getNextTarEntry()) != null) {
-				if (!entry.isDirectory()) {
-					try (
-						FSDataOutputStream out = fs
-							.create(new Path(outputPath.concat(entry.getName()).concat(".gz")));
-						GZIPOutputStream gzipOs = new GZIPOutputStream(new BufferedOutputStream(out))) {
-
-						IOUtils.copy(tais, gzipOs);
-
-					}
-
-				}
-			}
-		}
+
+		doExtract(fs, outputPath, workingPath.concat("/").concat(crossrefFileNameTarGz));
+
 		Log.info("Crossref dump reading completed");
 
 	}
 
 }
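Because doExtract re-compresses each tar entry as a standalone .gz file under outputPath, downstream code can stream any produced part directly. A sketch of reading one back; the file name is hypothetical:

	package example;

	import java.io.BufferedReader;
	import java.io.InputStreamReader;
	import java.nio.charset.StandardCharsets;
	import java.util.zip.GZIPInputStream;

	import org.apache.hadoop.conf.Configuration;
	import org.apache.hadoop.fs.FileSystem;
	import org.apache.hadoop.fs.Path;

	public class ReadExtractedPart {

		public static void main(String[] args) throws Exception {
			FileSystem fs = FileSystem.get(new Configuration());

			// "/data/crossref/part-000.gz" is a placeholder; real names come
			// from the tar entries inside the Crossref dump.
			try (BufferedReader reader = new BufferedReader(new InputStreamReader(
				new GZIPInputStream(fs.open(new Path("/data/crossref/part-000.gz"))),
				StandardCharsets.UTF_8))) {
				System.out.println(reader.readLine());
			}
		}
	}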
workflow.xml (Crossref dump workflow), start node change:

@@ -41,7 +41,7 @@
         </configuration>
     </global>
 
-    <start to="resume_from"/>
+    <start to="DownloadDump"/>
 
     <decision name="resume_from">
         <switch>
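For context (not part of this commit), such a resume_from decision typically dispatches on a submission-time property via Oozie's wf:conf EL function; a hypothetical shape, with illustrative case names:

	<decision name="resume_from">
		<switch>
			<case to="DownloadDump">${wf:conf('resumeFrom') eq 'DownloadDump'}</case>
			<default to="DownloadDump"/>
		</switch>
	</decision>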
eu/dnetlib/dhp/oa/graph/hostedbymap/ExtractDoajJson.java (new file)

@@ -0,0 +1,55 @@
+
+package eu.dnetlib.dhp.oa.graph.hostedbymap;
+
+import static eu.dnetlib.dhp.common.collection.DecompressTarGz.doExtract;
+
+import java.io.*;
+import java.nio.charset.StandardCharsets;
+import java.util.Objects;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.common.collection.CollectorException;
+import eu.dnetlib.dhp.common.collection.HttpConnector2;
+
+public class ExtractDoajJson {
+
+	private static final Logger log = LoggerFactory.getLogger(ExtractDoajJson.class);
+
+	public static void main(final String[] args) throws Exception {
+		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
+			IOUtils
+				.toString(
+					Objects
+						.requireNonNull(
+							ExtractDoajJson.class
+								.getResourceAsStream(
+									"/eu/dnetlib/dhp/oa/graph/hostedbymap/download_json_parameters.json"))));
+
+		parser.parseArgument(args);
+
+		final String compressedInput = parser.get("compressedFile");
+		log.info("compressedInput {}", compressedInput);
+
+		final String hdfsNameNode = parser.get("hdfsNameNode");
+		log.info("hdfsNameNode {}", hdfsNameNode);
+
+		final String outputPath = parser.get("outputPath");
+		log.info("outputPath {}", outputPath);
+
+		Configuration conf = new Configuration();
+		conf.set("fs.defaultFS", hdfsNameNode);
+
+		FileSystem fs = FileSystem.get(conf);
+
+		doExtract(fs, outputPath, compressedInput);
+
+	}
+
+}
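Outside Oozie, the class can be driven with the long parameter names declared in download_json_parameters.json (shown next); a sketch with placeholder values:

	package example;

	import eu.dnetlib.dhp.oa.graph.hostedbymap.ExtractDoajJson;

	public class ExtractDoajJsonExample {

		public static void main(String[] args) throws Exception {
			// The name-node URI and paths are placeholders; in production they
			// are supplied by the extractTarGz Oozie action shown further below.
			ExtractDoajJson.main(new String[] {
				"--hdfsNameNode", "hdfs://namenode:8020",
				"--compressedFile", "/tmp/doaj/doaj.tar.gz",
				"--outputPath", "/tmp/working/DOAJ/"
			});
		}
	}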
eu/dnetlib/dhp/oa/graph/hostedbymap/download_json_parameters.json (new file)

@@ -0,0 +1,22 @@
+[
+
+	{
+		"paramName": "op",
+		"paramLongName": "outputPath",
+		"paramDescription": "the output json file produced by the CSV download procedure",
+		"paramRequired": true
+	},
+	{
+		"paramName": "hnn",
+		"paramLongName": "hdfsNameNode",
+		"paramDescription": "the HDFS name node address",
+		"paramRequired": true
+	},
+	{
+		"paramName": "cf",
+		"paramLongName": "compressedFile",
+		"paramDescription": "the path of the compressed tar.gz file to extract",
+		"paramRequired": true
+	}
+]
download.sh (new file)

@@ -0,0 +1,2 @@
+#!/bin/bash
+curl -LSs $1 | hdfs dfs -put - $2/$3
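The script streams the dump straight into HDFS without staging it on local disk: curl -L follows redirects and -Ss stays quiet except on errors, while hdfs dfs -put - reads from standard input. The unquoted positional parameters assume URLs and paths without spaces. A hypothetical invocation (URL and paths are placeholders):

	bash download.sh https://example.org/doaj_dump.tar.gz /user/oozie/doaj doaj.tar.gz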
workflow.xml (hostedbymap), start node change:

@@ -69,7 +69,7 @@
         </configuration>
     </global>
 
-    <start to="resume_from"/>
+    <start to="download_doaj_json"/>
 
     <decision name="resume_from">
        <switch>
workflow.xml (hostedbymap), new download and extract actions:

@@ -123,6 +123,41 @@
             <error to="Kill"/>
         </action>
 
+
+        <action name="download_doaj_json">
+            <shell xmlns="uri:oozie:shell-action:0.2">
+                <job-tracker>${jobTracker}</job-tracker>
+                <name-node>${nameNode}</name-node>
+                <configuration>
+                    <property>
+                        <name>mapred.job.queue.name</name>
+                        <value>${queueName}</value>
+                    </property>
+                </configuration>
+                <exec>download.sh</exec>
+                <argument>${doajJsonFileURL}</argument>
+                <argument>${dumpPath}</argument>
+                <argument>${dumpFileName}</argument>
+                <env-var>HADOOP_USER_NAME=${wf:user()}</env-var>
+                <file>download.sh</file>
+                <capture-output/>
+            </shell>
+            <ok to="extractTarGz"/>
+            <error to="Kill"/>
+
+        </action>
+
+        <action name="extractTarGz">
+            <java>
+                <main-class>eu.dnetlib.dhp.oa.graph.hostedbymap.ExtractDoajJson</main-class>
+                <arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
+                <arg>--compressedFile</arg><arg>${dumpPath}/${dumpFileName}</arg>
+                <arg>--outputPath</arg><arg>${workingDir}/DOAJ/</arg>
+            </java>
+            <ok to="End"/>
+            <error to="Kill"/>
+        </action>
+
         <join name="join_download" to="produceHBM"/>
 
         <action name="produceHBM">
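The new shell action expects doajJsonFileURL, dumpPath, dumpFileName, and queueName to be supplied at submission time; a hypothetical job.properties fragment (every value below is a placeholder, not taken from the commit):

	# Illustrative values only
	doajJsonFileURL=https://example.org/doaj_dump.tar.gz
	dumpPath=/user/oozie/doaj
	dumpFileName=doaj.tar.gz
	queueName=default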
File diff suppressed because one or more lines are too long