forked from antonis.lempesis/dnet-hadoop

moved the tar archive with max size on common module

This commit is contained in:
parent dabb33e018
commit d4382b54df

@@ -0,0 +1,119 @@
package eu.dnetlib.dhp.common;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.hadoop.fs.*;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;

public class MakeTarArchive implements Serializable {

	// Creates (or replaces) the tar file at outputPath and returns a stream
	// for writing entries into it.
	private static TarArchiveOutputStream getTar(FileSystem fileSystem, String outputPath) throws IOException {
		Path hdfsWritePath = new Path(outputPath);
		if (fileSystem.exists(hdfsWritePath)) {
			fileSystem.delete(hdfsWritePath, true);
		}
		FSDataOutputStream fsDataOutputStream = fileSystem.create(hdfsWritePath);

		return new TarArchiveOutputStream(fsDataOutputStream.getWrappedStream());
	}

	// Packs every file under inputPath into a single tar archive at outputPath,
	// placing all entries under dir_name inside the archive.
	private static void write(FileSystem fileSystem, String inputPath, String outputPath, String dir_name)
		throws IOException {

		TarArchiveOutputStream ar = getTar(fileSystem, outputPath);

		RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem
			.listFiles(
				new Path(inputPath), true);

		while (fileStatusListIterator.hasNext()) {
			writeCurrentFile(fileSystem, dir_name, fileStatusListIterator, ar, 0);
		}

		ar.close();
	}

	// Packs the content of inputPath into one or more tar archives, each of
	// at most gBperSplit GB.
	public static void tarMaxSize(FileSystem fileSystem, String inputPath, String outputPath, String dir_name,
		int gBperSplit) throws IOException {
		final long bytesPerSplit = 1024L * 1024L * 1024L * gBperSplit;

		// Note: getSpaceConsumed() accounts for HDFS replication, so it can
		// overestimate the raw data size.
		long sourceSize = fileSystem.getContentSummary(new Path(inputPath)).getSpaceConsumed();

		if (sourceSize < bytesPerSplit) {
			// Everything fits in a single archive.
			write(fileSystem, inputPath, outputPath + ".tar", dir_name);
		} else {
			int partNum = 0;

			RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem
				.listFiles(
					new Path(inputPath), true);
			boolean next = fileStatusListIterator.hasNext();
			while (next) {
				// Start a new part and fill it until the size cap is reached
				// or the input is exhausted.
				TarArchiveOutputStream ar = getTar(fileSystem, outputPath + "_" + (partNum + 1) + ".tar");

				long current_size = 0;
				while (next && current_size < bytesPerSplit) {
					current_size = writeCurrentFile(fileSystem, dir_name, fileStatusListIterator, ar, current_size);
					next = fileStatusListIterator.hasNext();
				}

				partNum += 1;
				ar.close();
			}
		}
	}

	// Copies the iterator's next file into the archive (skipping Hadoop
	// _SUCCESS markers) and returns the running size of the current part.
	private static long writeCurrentFile(FileSystem fileSystem, String dir_name,
		RemoteIterator<LocatedFileStatus> fileStatusListIterator,
		TarArchiveOutputStream ar, long current_size) throws IOException {
		LocatedFileStatus fileStatus = fileStatusListIterator.next();

		Path p = fileStatus.getPath();
		String p_string = p.toString();
		if (!p_string.endsWith("_SUCCESS")) {
			String name = p_string.substring(p_string.lastIndexOf("/") + 1);
			if (name.trim().equalsIgnoreCase("communities_infrastructures")) {
				name = "communities_infrastructures.json";
			}
			TarArchiveEntry entry = new TarArchiveEntry(dir_name + "/" + name);
			entry.setSize(fileStatus.getLen());
			current_size += fileStatus.getLen();
			ar.putArchiveEntry(entry);

			InputStream is = fileSystem.open(fileStatus.getPath());
			BufferedInputStream bis = new BufferedInputStream(is);

			int count;
			byte[] data = new byte[1024];
			while ((count = bis.read(data, 0, data.length)) != -1) {
				ar.write(data, 0, count);
			}
			bis.close();
			ar.closeArchiveEntry();
		}
		return current_size;
	}

}
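For reference, a minimal sketch of how tarMaxSize might be called from a driver; the namenode URI, paths, archive name, and the 10 GB split size below are illustrative assumptions, not values taken from this commit:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

import java.io.IOException;
import java.net.URI;

public class MakeTarArchiveExample {
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		// Hypothetical namenode URI; replace with the actual cluster address.
		FileSystem fs = FileSystem.get(URI.create("hdfs://namenode:8020"), conf);
		// Packs everything under /data/dump into parts of at most 10 GB each,
		// producing /data/dump_1.tar, /data/dump_2.tar, ... (or a single
		// /data/dump.tar when the source is smaller than one split).
		MakeTarArchive.tarMaxSize(fs, "/data/dump", "/data/dump", "dump", 10);
	}
}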
@@ -0,0 +1,38 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "type": "object",
  "properties": {
    "description": {
      "type": "string",
      "description": "Description of the research community/ research infrastructure"
    },
    "id": {
      "type": "string",
      "description": "OpenAIRE id of the research community/ research infrastructure"
    },
    "name": {
      "type": "string",
      "description": "The long name of the community"
    },
    "originalId": {
      "type": "string",
      "description": "The acronym of the community"
    },
    "subject": {
      "description": "Description of subject",
      "type": "array",
      "items": {
        "type": "string",
        "description": "Only for research communities: the list of the subjects associated to the research community"
      }
    },
    "type": {
      "type": "string",
      "description": "The type of the record (research community/research infrastructure)"
    },
    "zenodo_community": {
      "type": "string",
      "description": "The id of the Zenodo communities associated to the research community/Research infrastructure"
    }
  }
}
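As a sketch of how a record could be checked against this schema, the snippet below uses the everit-org json-schema library; the library choice, the classpath location of the schema file, and every field value in the sample record are assumptions made for illustration, not part of this commit:

import org.everit.json.schema.Schema;
import org.everit.json.schema.loader.SchemaLoader;
import org.json.JSONObject;
import org.json.JSONTokener;

import java.io.InputStream;

public class CommunitySchemaCheck {
	public static void main(String[] args) {
		// Hypothetical classpath location of the schema shown above.
		InputStream in = CommunitySchemaCheck.class
			.getResourceAsStream("/schemas/community_infrastructure_schema.json");
		Schema schema = SchemaLoader.load(new JSONObject(new JSONTokener(in)));

		// Invented sample record; all values are placeholders.
		JSONObject record = new JSONObject()
			.put("id", "example-openaire-id")
			.put("name", "Example Research Community")
			.put("originalId", "example")
			.put("type", "Research Community")
			.put("description", "A sample community used only to exercise the schema.")
			.put("zenodo_community", "https://zenodo.org/communities/example");

		// Throws org.everit.json.schema.ValidationException if the record does not conform.
		schema.validate(record);
		System.out.println("record conforms to the schema");
	}
}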
@@ -1,38 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-07/schema#",
  "type": "object",
  "properties": {
    "description": {
      "type": "string",
      "description": "Description of description"
    },
    "id": {
      "type": "string",
      "description": "Description of id"
    },
    "name": {
      "type": "string",
      "description": "Description of name"
    },
    "originalId": {
      "type": "string",
      "description": "Description of originalId"
    },
    "subject": {
      "description": "Description of subject",
      "type": "array",
      "items": {
        "type": "string",
        "description": "Description of subject"
      }
    },
    "type": {
      "type": "string",
      "description": "Description of type"
    },
    "zenodo_community": {
      "type": "string",
      "description": "Description of zenodo_community"
    }
  }
}