// 52 lines, 1.8 KiB, Java
package eu.dnetlib.jobs;

import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.support.ArgumentApplicationParser;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.stream.Collectors;
|
public abstract class AbstractSparkJob implements Serializable {
|
|
|
|
protected static final int NUM_PARTITIONS = 1000;
|
|
|
|
protected static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
|
|
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
|
|
|
|
public ArgumentApplicationParser parser; // parameters for the spark action
|
|
public SparkSession spark; // the spark session
|
|
|
|
public AbstractSparkJob() {}
|
|
|
|
public AbstractSparkJob(ArgumentApplicationParser parser, SparkSession spark) {
|
|
|
|
this.parser = parser;
|
|
this.spark = spark;
|
|
}
|
|
|
|
protected abstract void run() throws IOException;
|
|
|
|
protected static SparkSession getSparkSession(SparkConf conf) {
|
|
return SparkSession.builder().config(conf).getOrCreate();
|
|
}
|
|
|
|
protected static <T> void save(Dataset<T> dataset, String outPath, SaveMode mode) {
|
|
dataset.write().option("compression", "gzip").mode(mode).json(outPath);
|
|
}
|
|
|
|
public static String readResource(String path, Class<? extends AbstractSparkJob> clazz) throws IOException {
|
|
return IOUtils.toString(clazz.getResourceAsStream(path));
|
|
}
|
|
} |