BrBETA_dnet-hadoop/dhp-workflows/dhp-aggregation/src/main/java/eu/dnetlib/dhp/actionmanager/project/utils/ReadExcel.java

package eu.dnetlib.dhp.actionmanager.project.utils;

import java.io.*;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.poi.openxml4j.exceptions.InvalidFormatException;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.collection.CollectorException;
import eu.dnetlib.dhp.common.collection.HttpConnector2;

/**
 * Downloads an Excel file, parses it and writes the JSON serialization of each row to HDFS, one record per line.
 */
public class ReadExcel implements Closeable {
private static final Log log = LogFactory.getLog(ReadExcel.class);
private final BufferedWriter writer;
	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private final InputStream excelFile;
public static void main(final String[] args) throws Exception {
final ArgumentApplicationParser parser = new ArgumentApplicationParser(
IOUtils
.toString(
ReadExcel.class
.getResourceAsStream(
"/eu/dnetlib/dhp/actionmanager/project/parameters.json")));
parser.parseArgument(args);
final String fileURL = parser.get("fileURL");
final String hdfsPath = parser.get("hdfsPath");
final String hdfsNameNode = parser.get("hdfsNameNode");
final String classForName = parser.get("classForName");
final String sheetName = parser.get("sheetName");
try (final ReadExcel readExcel = new ReadExcel(hdfsPath, hdfsNameNode, fileURL)) {
log.info("Getting Excel file...");
readExcel.execute(classForName, sheetName);
}
}
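
	// Parses the given sheet of the downloaded workbook into instances of classForName
	// and writes each parsed row to HDFS as a JSON record.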
public void execute(final String classForName, final String sheetName)
throws IOException, ClassNotFoundException, InvalidFormatException, IllegalAccessException,
InstantiationException {
EXCELParser excelParser = new EXCELParser();
excelParser
.parse(excelFile, classForName, sheetName)
.stream()
.forEach(this::write);
}
	@Override
	public void close() throws IOException {
		// release both the downloaded Excel stream and the HDFS writer
		excelFile.close();
		writer.close();
	}
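
	// Opens (and truncates) the output file on HDFS and downloads the Excel file from fileURL.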
public ReadExcel(
final String hdfsPath,
final String hdfsNameNode,
final String fileURL) throws CollectorException, IOException {
final Configuration conf = new Configuration();
conf.set("fs.defaultFS", hdfsNameNode);
HttpConnector2 httpConnector = new HttpConnector2();
FileSystem fileSystem = FileSystem.get(conf);
Path hdfsWritePath = new Path(hdfsPath);
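		// remove a previous output file, if any, so the run starts from an empty file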
if (fileSystem.exists(hdfsWritePath)) {
fileSystem.delete(hdfsWritePath, false);
}
FSDataOutputStream fos = fileSystem.create(hdfsWritePath);
this.writer = new BufferedWriter(new OutputStreamWriter(fos, StandardCharsets.UTF_8));
this.excelFile = httpConnector.getInputSourceAsStream(fileURL);
}
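
	// Serializes a single parsed row as JSON and appends it as one line of the output file.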
protected void write(final Object p) {
try {
writer.write(OBJECT_MAPPER.writeValueAsString(p));
writer.newLine();
} catch (final Exception e) {
throw new RuntimeException(e);
}
}
}