code formatting

Claudio Atzori 2020-11-04 18:12:43 +01:00
parent f5f346dd2b
commit 4625b7486e
4 changed files with 103 additions and 108 deletions

MakeTarArchive.java

@@ -1,119 +1,117 @@
package eu.dnetlib.dhp.common;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.Serializable;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.hadoop.fs.*;
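
// Packages the files under an HDFS directory tree into one or more tar archives, written back to HDFS.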
public class MakeTarArchive implements Serializable {
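
    // Opens a tar output stream at outputPath on HDFS, replacing any pre-existing file.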
    private static TarArchiveOutputStream getTar(FileSystem fileSystem, String outputPath) throws IOException {
        Path hdfsWritePath = new Path(outputPath);
        FSDataOutputStream fsDataOutputStream = null;
        if (fileSystem.exists(hdfsWritePath)) {
            fileSystem.delete(hdfsWritePath, true);
        }
        fsDataOutputStream = fileSystem.create(hdfsWritePath);

        return new TarArchiveOutputStream(fsDataOutputStream.getWrappedStream());
    }
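
    // Archives every file found under inputPath (recursively) into a single tar rooted at dir_name.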
    private static void write(FileSystem fileSystem, String inputPath, String outputPath, String dir_name)
        throws IOException {
        Path hdfsWritePath = new Path(outputPath);
        FSDataOutputStream fsDataOutputStream = null;
        if (fileSystem.exists(hdfsWritePath)) {
            fileSystem.delete(hdfsWritePath, true);
        }
        fsDataOutputStream = fileSystem.create(hdfsWritePath);

        TarArchiveOutputStream ar = new TarArchiveOutputStream(fsDataOutputStream.getWrappedStream());

        RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem
            .listFiles(
                new Path(inputPath), true);

        while (fileStatusListIterator.hasNext()) {
            writeCurrentFile(fileSystem, dir_name, fileStatusListIterator, ar, 0);
        }

        ar.close();
    }
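
    // Writes a single .tar when the source fits in gBperSplit gigabytes, otherwise a series of
    // numbered parts (outputPath_1.tar, outputPath_2.tar, ...) each capped at that size.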
    public static void tarMaxSize(FileSystem fileSystem, String inputPath, String outputPath, String dir_name,
        int gBperSplit) throws IOException {
        final long bytesPerSplit = 1024L * 1024L * 1024L * gBperSplit;

        long sourceSize = fileSystem.getContentSummary(new Path(inputPath)).getSpaceConsumed();

        if (sourceSize < bytesPerSplit) {
            write(fileSystem, inputPath, outputPath + ".tar", dir_name);
        } else {
            int partNum = 0;

            RemoteIterator<LocatedFileStatus> fileStatusListIterator = fileSystem
                .listFiles(
                    new Path(inputPath), true);
            boolean next = fileStatusListIterator.hasNext();
            while (next) {
                TarArchiveOutputStream ar = getTar(fileSystem, outputPath + "_" + (partNum + 1) + ".tar");

                long current_size = 0;
                while (next && current_size < bytesPerSplit) {
                    current_size = writeCurrentFile(fileSystem, dir_name, fileStatusListIterator, ar, current_size);
                    next = fileStatusListIterator.hasNext();
                }

                partNum += 1;
                ar.close();
            }
        }
    }
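
    // Streams the next file from the iterator into the open archive, skipping Hadoop _SUCCESS
    // markers, and returns the running byte count used by the size-based splitting above.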
    private static long writeCurrentFile(FileSystem fileSystem, String dir_name,
        RemoteIterator<LocatedFileStatus> fileStatusListIterator,
        TarArchiveOutputStream ar, long current_size) throws IOException {
        LocatedFileStatus fileStatus = fileStatusListIterator.next();

        Path p = fileStatus.getPath();
        String p_string = p.toString();
        if (!p_string.endsWith("_SUCCESS")) {
            String name = p_string.substring(p_string.lastIndexOf("/") + 1);
            if (name.trim().equalsIgnoreCase("communities_infrastructures")) {
                name = "communities_infrastructures.json";
            }
            TarArchiveEntry entry = new TarArchiveEntry(dir_name + "/" + name);
            entry.setSize(fileStatus.getLen());
            current_size += fileStatus.getLen();
            ar.putArchiveEntry(entry);

            InputStream is = fileSystem.open(fileStatus.getPath());

            BufferedInputStream bis = new BufferedInputStream(is);

            int count;
            byte[] data = new byte[1024];
            while ((count = bis.read(data, 0, data.length)) != -1) {
                ar.write(data, 0, count);
            }
            bis.close();
            ar.closeArchiveEntry();
        }
        return current_size;
    }

}
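
For orientation, a minimal caller sketch (the name-node URI and paths are illustrative, not part of this commit; imports from org.apache.hadoop.conf and org.apache.hadoop.fs assumed):

    Configuration conf = new Configuration();
    conf.set("fs.defaultFS", "hdfs://nameservice1"); // illustrative name node
    FileSystem fileSystem = FileSystem.get(conf);

    // below the 10 GB threshold this writes /output/dump.tar;
    // above it, /output/dump_1.tar, /output/dump_2.tar, ...
    MakeTarArchive.tarMaxSize(fileSystem, "/working/dump", "/output/dump", "dump", 10);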

CleaningFunctions.java

@@ -109,20 +109,20 @@ public class CleaningFunctions {
        }
        if (Objects.nonNull(r.getPid())) {
            r
                .setPid(
                    r
                        .getPid()
                        .stream()
                        .filter(Objects::nonNull)
                        .filter(sp -> StringUtils.isNotBlank(StringUtils.trim(sp.getValue())))
                        .filter(sp -> !NONE.equalsIgnoreCase(sp.getValue()))
                        .filter(sp -> Objects.nonNull(sp.getQualifier()))
                        .filter(sp -> StringUtils.isNotBlank(sp.getQualifier().getClassid()))
                        .map(sp -> {
                            sp.setValue(StringUtils.trim(sp.getValue()));
                            return sp;
                        })
                        .collect(Collectors.toList()));
        }
        if (Objects.isNull(r.getResourcetype()) || StringUtils.isBlank(r.getResourcetype().getClassid())) {
            r
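
To make the intent of that filter chain visible, here is a self-contained sketch on stand-in types (Pid and Qualifier below are illustrative substitutes for the dnet model classes, not the real ones):

    import java.util.*;
    import java.util.stream.*;

    public class PidCleaningSketch {

        static class Qualifier {
            final String classid;
            Qualifier(String classid) { this.classid = classid; }
        }

        static class Pid {
            String value;
            final Qualifier qualifier;
            Pid(String value, Qualifier qualifier) { this.value = value; this.qualifier = qualifier; }
        }

        public static void main(String[] args) {
            List<Pid> pids = Arrays.asList(
                null,                                            // dropped: null entry
                new Pid("   ", new Qualifier("doi")),            // dropped: blank value
                new Pid("none", new Qualifier("doi")),           // dropped: placeholder value
                new Pid(" 10.1000/182 ", null),                  // dropped: missing qualifier
                new Pid(" 10.1000/182 ", new Qualifier("doi"))); // kept, value trimmed

            List<Pid> cleaned = pids
                .stream()
                .filter(Objects::nonNull)
                .filter(sp -> sp.value != null && !sp.value.trim().isEmpty())
                .filter(sp -> !"none".equalsIgnoreCase(sp.value))
                .filter(sp -> Objects.nonNull(sp.qualifier))
                .filter(sp -> sp.qualifier.classid != null && !sp.qualifier.classid.isEmpty())
                .map(sp -> { sp.value = sp.value.trim(); return sp; })
                .collect(Collectors.toList());

            System.out.println(cleaned.size() + " pid(s) kept: " + cleaned.get(0).value); // 1 pid(s) kept: 10.1000/182
        }
    }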

MakeTar.java

@@ -4,7 +4,6 @@ package eu.dnetlib.dhp.oa.graph.dump;
import java.io.*;
import java.util.Optional;

import org.apache.commons.compress.archivers.ar.ArArchiveEntry;
import org.apache.commons.compress.archivers.ar.ArArchiveOutputStream;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
@@ -16,6 +15,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.MakeTarArchive;
import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
public class MakeTar implements Serializable {
@@ -41,9 +41,10 @@ public class MakeTar implements Serializable {
        final String inputPath = parser.get("sourcePath");
        log.info("input path : {}", inputPath);

        final int gBperSplit = Optional
            .ofNullable(parser.get("splitSize"))
            .map(Integer::valueOf)
            .orElse(10);

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", hdfsNameNode);
@@ -54,7 +55,8 @@ public class MakeTar implements Serializable {
    }

    public static void makeTArArchive(FileSystem fileSystem, String inputPath, String outputPath, int gBperSplit)
        throws IOException {
        RemoteIterator<LocatedFileStatus> dir_iterator = fileSystem.listLocatedStatus(new Path(inputPath));
@@ -70,5 +72,4 @@ public class MakeTar implements Serializable {
    }
}
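
The Optional chain above falls back to 10 GB per part when the splitSize argument is absent; a self-contained illustration of that default-handling pattern (class and method names invented for the example):

    import java.util.Optional;

    public class SplitSizeDefaultSketch {

        static int gBperSplit(String raw) {
            return Optional
                .ofNullable(raw)      // raw is null when the argument was not passed
                .map(Integer::valueOf)
                .orElse(10);          // default: 10 GB per tar part
        }

        public static void main(String[] args) {
            System.out.println(gBperSplit(null)); // 10
            System.out.println(gBperSplit("25")); // 25
        }
    }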

Utils.java

@@ -5,7 +5,6 @@ import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.api.java.function.MapFunction;
@@ -13,14 +12,12 @@ import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;

import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
import eu.dnetlib.dhp.oa.graph.dump.graph.Constants;
import eu.dnetlib.dhp.utils.DHPUtils;
import eu.dnetlib.dhp.utils.ISLookupClientFactory;
import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
@@ -74,5 +71,4 @@ public class Utils {
        return new Gson().fromJson(sb.toString(), CommunityMap.class);
    }
}
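
The last visible line deserializes the community map with Gson. A minimal sketch of the same pattern (the nested CommunityMap here is a local stand-in assumed to behave like a String-to-String map, and the ids and labels are invented):

    import java.util.HashMap;

    import com.google.gson.Gson;

    public class CommunityMapSketch {

        // local stand-in for eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap (assumption)
        static class CommunityMap extends HashMap<String, String> {
        }

        public static void main(String[] args) {
            String json = "{\"community-1\":\"Community One\",\"community-2\":\"Community Two\"}";
            CommunityMap map = new Gson().fromJson(json, CommunityMap.class);
            System.out.println(map.get("community-1")); // Community One
        }
    }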