
Merge branch 'dhp_oaf_model' of https://code-repo.d4science.org/D-Net/dnet-hadoop into dhp_oaf_model

Claudio Atzori 2020-06-16 15:43:05 +02:00
commit b1dcb09c90
6 changed files with 131 additions and 137 deletions


@@ -57,7 +57,7 @@ public class PrepareProjects {
		final String outputPath = parser.get("outputPath");
		log.info("outputPath {}: ", outputPath);

		final String dbProjectPath = parser.get("dbProjectPath");
		log.info("dbProjectPath {}: ", dbProjectPath);

		SparkConf conf = new SparkConf();
@@ -75,56 +75,39 @@ public class PrepareProjects {
		HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
	}

-	private static void exec(SparkSession spark, String progjectPath, String dbProjectPath, String outputPath) {
-		Dataset<CSVProject> project = readPath(spark, progjectPath, CSVProject.class);
+	private static void exec(SparkSession spark, String projectPath, String dbProjectPath, String outputPath) {
+		Dataset<CSVProject> project = readPath(spark, projectPath, CSVProject.class);
		Dataset<ProjectSubset> dbProjects = readPath(spark, dbProjectPath, ProjectSubset.class);

-		dbProjects.joinWith(project, dbProjects.col("code").equalTo(project.col("id")), "left")
-			.flatMap((FlatMapFunction<Tuple2<ProjectSubset, CSVProject>, CSVProject>) value -> {
-				Optional<CSVProject> csvProject = Optional.ofNullable(value._2());
-				if(! csvProject.isPresent()){
-					return null;
-				}
-				List<CSVProject> csvProjectList = new ArrayList<>();
-				String[] programme = csvProject.get().getProgramme().split(";");
-				Arrays
-					.stream(programme)
-					.forEach(p -> {
-						CSVProject proj = new CSVProject();
-						proj.setProgramme(p);
-						proj.setId(csvProject.get().getId());
-						csvProjectList.add(proj);
-					});
-				return csvProjectList.iterator();
-			}, Encoders.bean(CSVProject.class))
+		dbProjects
+			.joinWith(project, dbProjects.col("code").equalTo(project.col("id")), "left")
+			.flatMap(getTuple2CSVProjectFlatMapFunction(), Encoders.bean(CSVProject.class))
+			.filter(Objects::nonNull)
			.write()
			.mode(SaveMode.Overwrite)
			.option("compression", "gzip")
			.json(outputPath);
-
-//		.map(value -> {
-//			Optional<CSVProject> csvProject = Optional.ofNullable(value._2());
-//		}, Encoders.bean(CSVProject.class))
-//		.filter(Objects::nonNull)
-//		.toJavaRDD()
-//		.flatMap(p -> {
-//			List<CSVProject> csvProjectList = new ArrayList<>();
-//			String[] programme = p.getProgramme().split(";");
-//			Arrays
-//				.stream(programme)
-//				.forEach(value -> {
-//					CSVProject csvProject = new CSVProject();
-//					csvProject.setProgramme(value);
-//					csvProject.setId(p.getId());
-//					csvProjectList.add(csvProject);
-//				});
-//
-//			return csvProjectList.iterator();
-//		})
-//		.map(p -> OBJECT_MAPPER.writeValueAsString(p))
-//		.saveAsTextFile(outputPath);
+	}
+
+	private static FlatMapFunction<Tuple2<ProjectSubset, CSVProject>, CSVProject> getTuple2CSVProjectFlatMapFunction() {
+		return (FlatMapFunction<Tuple2<ProjectSubset, CSVProject>, CSVProject>) value -> {
+			Optional<CSVProject> csvProject = Optional.ofNullable(value._2());
+			List<CSVProject> csvProjectList = new ArrayList<>();
+			if (csvProject.isPresent()) {
+				String[] programme = csvProject.get().getProgramme().split(";");
+				Arrays
+					.stream(programme)
+					.forEach(p -> {
+						CSVProject proj = new CSVProject();
+						proj.setProgramme(p);
+						proj.setId(csvProject.get().getId());
+						csvProjectList.add(proj);
+					});
+			}
+			return csvProjectList.iterator();
+		};
	}

	public static <R> Dataset<R> readPath(
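
As an aside, a minimal plain-Java sketch of the behaviour that the extracted getTuple2CSVProjectFlatMapFunction encapsulates: a left-join tuple whose CSVProject side is null yields an empty iterator, while a matched project whose programme field holds semicolon-separated codes is expanded into one CSVProject per code. The CSVProject class and the sample values below are simplified stand-ins for illustration only, not the project's actual bean or data.

// Sketch only: plain-Java analogue of the flatMap body, outside Spark.
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;

public class ProgrammeSplitSketch {

	// Simplified stand-in for the CSVProject bean (only the fields used here).
	public static class CSVProject {
		private String id;
		private String programme;

		public String getId() { return id; }
		public void setId(String id) { this.id = id; }
		public String getProgramme() { return programme; }
		public void setProgramme(String programme) { this.programme = programme; }
	}

	// Mirrors the lambda returned by getTuple2CSVProjectFlatMapFunction:
	// null (no join match) -> empty iterator; otherwise one CSVProject per programme code.
	public static Iterator<CSVProject> split(CSVProject joined) {
		Optional<CSVProject> csvProject = Optional.ofNullable(joined);
		List<CSVProject> csvProjectList = new ArrayList<>();
		if (csvProject.isPresent()) {
			for (String p : csvProject.get().getProgramme().split(";")) {
				CSVProject proj = new CSVProject();
				proj.setProgramme(p);
				proj.setId(csvProject.get().getId());
				csvProjectList.add(proj);
			}
		}
		return csvProjectList.iterator();
	}

	public static void main(String[] args) {
		CSVProject matched = new CSVProject();
		matched.setId("894593");
		matched.setProgramme("H2020-EU.1.;H2020-EU.2."); // hypothetical sample programme codes
		// Prints two records, one per programme code.
		split(matched).forEachRemaining(p -> System.out.println(p.getId() + " -> " + p.getProgramme()));
	}
}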


@@ -1,15 +1,5 @@
 package eu.dnetlib.dhp.actionmanager.project;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.common.DbClient;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;

 import java.io.BufferedWriter;
 import java.io.Closeable;
@@ -22,92 +12,104 @@ import java.util.List;
 import java.util.function.Consumer;
 import java.util.function.Function;

+import org.apache.commons.io.IOUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.common.DbClient;
+
 public class ReadProjectsFromDB implements Closeable {

	private final DbClient dbClient;
	private static final Log log = LogFactory.getLog(ReadProjectsFromDB.class);
	private final Configuration conf;
	private final BufferedWriter writer;
	private final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private final static String query = "SELECT code " +
-		"from projects where id like 'corda__h2020%' " ;
+		"from projects where id like 'corda__h2020%' ";

	public static void main(final String[] args) throws Exception {
		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
			IOUtils
				.toString(
					ReadProjectsFromDB.class
						.getResourceAsStream(
							"/eu/dnetlib/dhp/actionmanager/project/read_projects_db.json")));

		parser.parseArgument(args);

		final String dbUrl = parser.get("postgresUrl");
		final String dbUser = parser.get("postgresUser");
		final String dbPassword = parser.get("postgresPassword");
-		final String hdfsPath = parser.get("hdfsPath") ;
+		final String hdfsPath = parser.get("hdfsPath");
		final String hdfsNameNode = parser.get("hdfsNameNode");

		try (final ReadProjectsFromDB rbl = new ReadProjectsFromDB(hdfsPath, hdfsNameNode, dbUrl, dbUser,
			dbPassword)) {

-			log.info("Processing blacklist...");
+			log.info("Processing projects...");

			rbl.execute(query, rbl::processProjectsEntry);
		}
	}

	public void execute(final String sql, final Function<ResultSet, List<ProjectSubset>> producer) throws Exception {

		final Consumer<ResultSet> consumer = rs -> producer.apply(rs).forEach(r -> writeProject(r));

		dbClient.processResults(sql, consumer);
	}

	public List<ProjectSubset> processProjectsEntry(ResultSet rs) {
		try {
			ProjectSubset p = new ProjectSubset();
			p.setCode(rs.getString("code"));

			return Arrays.asList(p);

		} catch (final Exception e) {
			throw new RuntimeException(e);
		}
	}

	protected void writeProject(final ProjectSubset r) {
		try {
			writer.write(OBJECT_MAPPER.writeValueAsString(r));
			writer.newLine();
		} catch (final Exception e) {
			throw new RuntimeException(e);
		}
	}

	public ReadProjectsFromDB(
		final String hdfsPath, String hdfsNameNode, final String dbUrl, final String dbUser, final String dbPassword)
		throws Exception {

		this.dbClient = new DbClient(dbUrl, dbUser, dbPassword);
		this.conf = new Configuration();
		this.conf.set("fs.defaultFS", hdfsNameNode);
		FileSystem fileSystem = FileSystem.get(this.conf);
		Path hdfsWritePath = new Path(hdfsPath);
		FSDataOutputStream fsDataOutputStream = null;
		if (fileSystem.exists(hdfsWritePath)) {
			fileSystem.delete(hdfsWritePath, false);
		}
		fsDataOutputStream = fileSystem.create(hdfsWritePath);

		this.writer = new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
	}

	@Override
	public void close() throws IOException {
		dbClient.close();
		writer.close();
	}
}
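
For reference, a minimal sketch of the per-line format that writeProject emits to HDFS: each ProjectSubset is serialized with Jackson as a single JSON object per line, which is the shape of the dbProject test resource added at the end of this diff. The ProjectSubset class below is a simplified stand-in with only the code field, not the project's actual bean.

// Sketch only: one JSON line per ProjectSubset, as produced by OBJECT_MAPPER.writeValueAsString + newLine.
import com.fasterxml.jackson.databind.ObjectMapper;

public class ProjectSubsetLineSketch {

	// Simplified stand-in for the ProjectSubset bean.
	public static class ProjectSubset {
		private String code;

		public String getCode() { return code; }
		public void setCode(String code) { this.code = code; }
	}

	public static void main(String[] args) throws Exception {
		ObjectMapper mapper = new ObjectMapper();
		ProjectSubset p = new ProjectSubset();
		p.setCode("894593");
		// Prints: {"code":"894593"}  -- one such line is written per project code.
		System.out.println(mapper.writeValueAsString(p));
	}
}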


@@ -96,7 +96,7 @@
			<spark xmlns="uri:oozie:spark-action:0.2">
				<master>yarn</master>
				<mode>cluster</mode>
-				<name>PrepareProgramme</name>
+				<name>PrepareProjects</name>
				<class>eu.dnetlib.dhp.actionmanager.project.PrepareProjects</class>
				<jar>dhp-aggregation-${projectVersion}.jar</jar>
				<spark-opts>
@@ -111,6 +111,7 @@
				</spark-opts>
				<arg>--projectPath</arg><arg>${workingDir}/projects</arg>
				<arg>--outputPath</arg><arg>${workingDir}/preparedProjects</arg>
+				<arg>--dbProjectPath</arg><arg>${workingDir}/dbProjects</arg>
			</spark>
			<ok to="create_updates"/>
			<error to="Kill"/>


@@ -67,7 +67,7 @@ public class PrepareProjectTest {
	}

	@Test
-	public void numberDistinctProgrammeTest() throws Exception {
+	public void numberDistinctProjectTest() throws Exception {
		PrepareProjects
			.main(
				new String[] {
@@ -76,7 +76,10 @@ public class PrepareProjectTest {
					"-projectPath",
					getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/projects_subset.json").getPath(),
					"-outputPath",
-					workingDir.toString() + "/preparedProjects"
+					workingDir.toString() + "/preparedProjects",
+					"-dbProjectPath",
+					getClass().getResource("/eu/dnetlib/dhp/actionmanager/project/dbProject").getPath(),
				});

		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
@@ -85,7 +88,7 @@ public class PrepareProjectTest {
			.textFile(workingDir.toString() + "/preparedProjects")
			.map(item -> OBJECT_MAPPER.readValue(item, CSVProject.class));

-		Assertions.assertEquals(20, tmp.count());
+		Assertions.assertEquals(8, tmp.count());

		Dataset<CSVProject> verificationDataset = spark.createDataset(tmp.rdd(), Encoders.bean(CSVProject.class));


@@ -5,7 +5,6 @@ import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;

-import eu.dnetlib.dhp.schema.action.AtomicAction;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.spark.SparkConf;
@@ -21,6 +20,7 @@ import org.slf4j.LoggerFactory;
 import com.fasterxml.jackson.databind.ObjectMapper;

+import eu.dnetlib.dhp.schema.action.AtomicAction;
 import eu.dnetlib.dhp.schema.oaf.Project;

 public class SparkUpdateProjectTest {
@@ -84,15 +84,12 @@ public class SparkUpdateProjectTest {
		final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

		JavaRDD<Project> tmp = sc
			.sequenceFile(workingDir.toString() + "/actionSet", Text.class, Text.class)
			.map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
-			.map(aa -> ((Project)aa.getPayload()))
-			;
+			.map(aa -> ((Project) aa.getPayload()));

		Assertions.assertEquals(14, tmp.count());
-
-//		Dataset<CSVProgramme> verificationDataset = spark.createDataset(tmp.rdd(), Encoders.bean(CSVProgramme.class));
-//
-//		Assertions.assertEquals(0, verificationDataset.filter("shortTitle =''").count());
	}
}


@@ -0,0 +1,8 @@
+{"code":"894593"}
+{"code":"897004"}
+{"code":"896300"}
+{"code":"892890"}
+{"code":"886828"}
+{"code":"8867767"}
+{"code":"101003374"}
+{"code":"886776"}