moved business logic to new class in common for handling the zip of the archives

This commit is contained in:
Miriam Baglioni 2020-11-03 16:55:50 +01:00
parent 7d95a5e2b4
commit cbbb1bdc54
1 changed file with 131 additions and 125 deletions

View File

@ -2,7 +2,9 @@
package eu.dnetlib.dhp.oa.graph.dump; package eu.dnetlib.dhp.oa.graph.dump;
import java.io.*; import java.io.*;
import java.util.Optional;
import eu.dnetlib.dhp.common.MakeTarArchive;
import org.apache.commons.compress.archivers.ar.ArArchiveEntry; import org.apache.commons.compress.archivers.ar.ArArchiveEntry;
import org.apache.commons.compress.archivers.ar.ArArchiveOutputStream; import org.apache.commons.compress.archivers.ar.ArArchiveOutputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
@ -39,16 +41,20 @@ public class MakeTar implements Serializable {
final String inputPath = parser.get("sourcePath"); final String inputPath = parser.get("sourcePath");
log.info("input path : {}", inputPath); log.info("input path : {}", inputPath);
final int gBperSplit = Optional.ofNullable(parser.get("splitSize"))
.map(Integer::valueOf)
.orElse(10);
Configuration conf = new Configuration(); Configuration conf = new Configuration();
conf.set("fs.defaultFS", hdfsNameNode); conf.set("fs.defaultFS", hdfsNameNode);
FileSystem fileSystem = FileSystem.get(conf); FileSystem fileSystem = FileSystem.get(conf);
makeTArArchive(fileSystem, inputPath, outputPath); makeTArArchive(fileSystem, inputPath, outputPath, gBperSplit);
} }
public static void makeTArArchive(FileSystem fileSystem, String inputPath, String outputPath) throws IOException { public static void makeTArArchive(FileSystem fileSystem, String inputPath, String outputPath, int gBperSplit) throws IOException {
RemoteIterator<LocatedFileStatus> dir_iterator = fileSystem.listLocatedStatus(new Path(inputPath)); RemoteIterator<LocatedFileStatus> dir_iterator = fileSystem.listLocatedStatus(new Path(inputPath));
@ -59,132 +65,132 @@ public class MakeTar implements Serializable {
String p_string = p.toString(); String p_string = p.toString();
String entity = p_string.substring(p_string.lastIndexOf("/") + 1); String entity = p_string.substring(p_string.lastIndexOf("/") + 1);
writeMaxSize(fileSystem, p_string, outputPath + "/" + entity, entity, 10); MakeTarArchive.tarMaxSize(fileSystem, p_string, outputPath + "/" + entity, entity, gBperSplit);
} }
} }
private static TarArchiveOutputStream getTar(FileSystem fileSystem, String outputPath) throws IOException { // private static TarArchiveOutputStream getTar(FileSystem fileSystem, String outputPath) throws IOException {
Path hdfsWritePath = new Path(outputPath); // Path hdfsWritePath = new Path(outputPath);
FSDataOutputStream fsDataOutputStream = null; // FSDataOutputStream fsDataOutputStream = null;
if (fileSystem.exists(hdfsWritePath)) { // if (fileSystem.exists(hdfsWritePath)) {
fileSystem.delete(hdfsWritePath, true); // fileSystem.delete(hdfsWritePath, true);
//
} // }
fsDataOutputStream = fileSystem.create(hdfsWritePath); // fsDataOutputStream = fileSystem.create(hdfsWritePath);
//
return new TarArchiveOutputStream(fsDataOutputStream.getWrappedStream()); // return new TarArchiveOutputStream(fsDataOutputStream.getWrappedStream());
} // }
//
private static void writeMaxSize(FileSystem fileSystem, String inputPath, String outputPath, String dir_name, // private static void writeMaxSize(FileSystem fileSystem, String inputPath, String outputPath, String dir_name,
int gBperSplit) throws IOException { // int gBperSplit) throws IOException {
final long bytesPerSplit = 1024L * 1024L * 1024L * gBperSplit; // final long bytesPerSplit = 1024L * 1024L * 1024L * gBperSplit;
//
long sourceSize = fileSystem.getContentSummary(new Path(inputPath)).getSpaceConsumed(); // long sourceSize = fileSystem.getContentSummary(new Path(inputPath)).getSpaceConsumed();
//
final long numberOfSplits = sourceSize / bytesPerSplit; // final long numberOfSplits = sourceSize / bytesPerSplit;
//
if (numberOfSplits < 2) { // if (numberOfSplits < 2) {
write(fileSystem, inputPath, outputPath + ".tar", dir_name); // write(fileSystem, inputPath, outputPath + ".tar", dir_name);
} else { // } else {
int partNum = 0; // int partNum = 0;
//
RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem // RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem
.listFiles( // .listFiles(
new Path(inputPath), true); // new Path(inputPath), true);
boolean next = fileStatusListIterator.hasNext(); // boolean next = fileStatusListIterator.hasNext();
while (sourceSize > 0 && next) { // while (sourceSize > 0 && next) {
TarArchiveOutputStream ar = getTar(fileSystem, outputPath + "_" + (partNum + 1) + ".tar"); // TarArchiveOutputStream ar = getTar(fileSystem, outputPath + "_" + (partNum + 1) + ".tar");
//
long current_size = 0; // long current_size = 0;
while (next && current_size < bytesPerSplit) { // while (next && current_size < bytesPerSplit) {
LocatedFileStatus fileStatus = fileStatusListIterator.next(); // LocatedFileStatus fileStatus = fileStatusListIterator.next();
//
Path p = fileStatus.getPath(); // Path p = fileStatus.getPath();
String p_string = p.toString(); // String p_string = p.toString();
if (!p_string.endsWith("_SUCCESS")) { // if (!p_string.endsWith("_SUCCESS")) {
String name = p_string.substring(p_string.lastIndexOf("/") + 1); // String name = p_string.substring(p_string.lastIndexOf("/") + 1);
if (name.trim().equalsIgnoreCase("communities_infrastructures")) { // if (name.trim().equalsIgnoreCase("communities_infrastructures")) {
name = "communities_infrastructures.json"; // name = "communities_infrastructures.json";
} // }
TarArchiveEntry entry = new TarArchiveEntry(dir_name + "/" + name); // TarArchiveEntry entry = new TarArchiveEntry(dir_name + "/" + name);
entry.setSize(fileStatus.getLen()); // entry.setSize(fileStatus.getLen());
current_size += fileStatus.getLen(); // current_size += fileStatus.getLen();
ar.putArchiveEntry(entry); // ar.putArchiveEntry(entry);
//
InputStream is = fileSystem.open(fileStatus.getPath()); // InputStream is = fileSystem.open(fileStatus.getPath());
//
BufferedInputStream bis = new BufferedInputStream(is); // BufferedInputStream bis = new BufferedInputStream(is);
//
int count; // int count;
byte data[] = new byte[1024]; // byte data[] = new byte[1024];
while ((count = bis.read(data, 0, data.length)) != -1) { // while ((count = bis.read(data, 0, data.length)) != -1) {
ar.write(data, 0, count); // ar.write(data, 0, count);
} // }
bis.close(); // bis.close();
ar.closeArchiveEntry(); // ar.closeArchiveEntry();
//
} // }
next = fileStatusListIterator.hasNext(); // next = fileStatusListIterator.hasNext();
//
} // }
sourceSize = sourceSize - current_size; // sourceSize = sourceSize - current_size;
partNum += 1; // partNum += 1;
ar.close(); // ar.close();
} // }
//
} // }
//
} // }
//
private static void write(FileSystem fileSystem, String inputPath, String outputPath, String dir_name) // private static void write(FileSystem fileSystem, String inputPath, String outputPath, String dir_name)
throws IOException { // throws IOException {
//
Path hdfsWritePath = new Path(outputPath); // Path hdfsWritePath = new Path(outputPath);
FSDataOutputStream fsDataOutputStream = null; // FSDataOutputStream fsDataOutputStream = null;
if (fileSystem.exists(hdfsWritePath)) { // if (fileSystem.exists(hdfsWritePath)) {
fileSystem.delete(hdfsWritePath, true); // fileSystem.delete(hdfsWritePath, true);
//
} // }
fsDataOutputStream = fileSystem.create(hdfsWritePath); // fsDataOutputStream = fileSystem.create(hdfsWritePath);
//
TarArchiveOutputStream ar = new TarArchiveOutputStream(fsDataOutputStream.getWrappedStream()); // TarArchiveOutputStream ar = new TarArchiveOutputStream(fsDataOutputStream.getWrappedStream());
//
RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem // RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem
.listFiles( // .listFiles(
new Path(inputPath), true); // new Path(inputPath), true);
//
while (fileStatusListIterator.hasNext()) { // while (fileStatusListIterator.hasNext()) {
LocatedFileStatus fileStatus = fileStatusListIterator.next(); // LocatedFileStatus fileStatus = fileStatusListIterator.next();
//
Path p = fileStatus.getPath(); // Path p = fileStatus.getPath();
String p_string = p.toString(); // String p_string = p.toString();
if (!p_string.endsWith("_SUCCESS")) { // if (!p_string.endsWith("_SUCCESS")) {
String name = p_string.substring(p_string.lastIndexOf("/") + 1); // String name = p_string.substring(p_string.lastIndexOf("/") + 1);
if (name.trim().equalsIgnoreCase("communities_infrastructures")) { // if (name.trim().equalsIgnoreCase("communities_infrastructures")) {
name = "communities_infrastructures.json"; // name = "communities_infrastructures.json";
} // }
TarArchiveEntry entry = new TarArchiveEntry(dir_name + "/" + name); // TarArchiveEntry entry = new TarArchiveEntry(dir_name + "/" + name);
entry.setSize(fileStatus.getLen()); // entry.setSize(fileStatus.getLen());
ar.putArchiveEntry(entry); // ar.putArchiveEntry(entry);
//
InputStream is = fileSystem.open(fileStatus.getPath()); // InputStream is = fileSystem.open(fileStatus.getPath());
//
BufferedInputStream bis = new BufferedInputStream(is); // BufferedInputStream bis = new BufferedInputStream(is);
//
int count; // int count;
byte data[] = new byte[1024]; // byte data[] = new byte[1024];
while ((count = bis.read(data, 0, data.length)) != -1) { // while ((count = bis.read(data, 0, data.length)) != -1) {
ar.write(data, 0, count); // ar.write(data, 0, count);
} // }
bis.close(); // bis.close();
ar.closeArchiveEntry(); // ar.closeArchiveEntry();
//
} // }
//
} // }
//
ar.close(); // ar.close();
} // }
} }