updated generate scholix to generate json

commit a768226e52 (parent 0594b92a6d)
Author: Sandro La Bruzzo
Date: 2020-03-26 09:40:50 +01:00
3 changed files with 35 additions and 44 deletions

.gitignore

@@ -4,6 +4,7 @@
 *.ipr
 *.iml
 *~
+.vscode
 .classpath
 /*/.classpath
 /*/*/.classpath

eu/dnetlib/dhp/provision/SparkGenerateScholix.java

@@ -1,55 +1,30 @@
 package eu.dnetlib.dhp.provision;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.provision.scholix.*;
-import eu.dnetlib.dhp.provision.scholix.summary.*;
+import eu.dnetlib.dhp.provision.scholix.summary.ScholixSummary;
 import eu.dnetlib.dhp.schema.oaf.Relation;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.spark.SparkConf;
-import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.api.java.function.FlatMapFunction;
 import org.apache.spark.api.java.function.MapFunction;
-import org.apache.spark.api.java.function.PairFlatMapFunction;
-import org.apache.spark.sql.*;
-import static org.apache.spark.sql.functions.col;
-import scala.Int;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.SaveMode;
+import org.apache.spark.sql.SparkSession;
 import scala.Tuple2;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
 public class SparkGenerateScholix {
     public static void main(String[] args) throws Exception {
         final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkGenerateScholix.class.getResourceAsStream("/eu/dnetlib/dhp/provision/input_generate_summary_parameters.json")));
         parser.parseArgument(args);
         SparkConf conf = new SparkConf();
         conf.set("spark.sql.shuffle.partitions","4000");
-//        conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
-//        conf.registerKryoClasses(new Class[]{
-//                ScholixSummary.class,
-//                CollectedFromType.class,
-//                SchemeValue.class,
-//                TypedIdentifier.class,
-//                Typology.class,
-//                Relation.class,
-//                Scholix.class,
-//                ScholixCollectedFrom.class,
-//                ScholixEntityId.class,
-//                ScholixIdentifier.class,
-//                ScholixRelationship.class,
-//                ScholixResource.class
-//        });
+        conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
         final SparkSession spark = SparkSession
                 .builder()
                 .config(conf)
@@ -57,6 +32,16 @@ public class SparkGenerateScholix {
                 .master(parser.get("master"))
                 .getOrCreate();
+        conf.registerKryoClasses(new Class[]{
+                Scholix.class,
+                ScholixCollectedFrom.class,
+                ScholixEntityId.class,
+                ScholixIdentifier.class,
+                ScholixRelationship.class,
+                ScholixResource.class
+        });
         final String graphPath = parser.get("graphPath");
         final String workingDirPath = parser.get("workingDirPath");
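Note: the commit enables Kryo and registers the Scholix model classes, but the registration above runs after getOrCreate(); since the builder copies the SparkConf when the session is created, registering afterwards typically has no effect on the running context. A minimal sketch of the ordering that does take effect (class names from this commit, the master argument is illustrative):

    import eu.dnetlib.dhp.provision.scholix.Scholix;
    import eu.dnetlib.dhp.provision.scholix.ScholixResource;
    import org.apache.spark.SparkConf;
    import org.apache.spark.sql.SparkSession;

    public class KryoSessionSketch {
        // Build a session with Kryo enabled; registration must precede
        // getOrCreate(), otherwise the already-created SparkContext never
        // sees the registered classes.
        public static SparkSession build(String master) {
            SparkConf conf = new SparkConf()
                    .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
            conf.registerKryoClasses(new Class[]{Scholix.class, ScholixResource.class});
            return SparkSession.builder().config(conf).master(master).getOrCreate();
        }
    }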
@@ -71,12 +56,16 @@ public class SparkGenerateScholix {
                 .map((MapFunction<Tuple2<ScholixSummary, Relation>, Scholix>) f -> Scholix.generateScholixWithSource(f._1(), f._2()), Encoders.bean(Scholix.class));
         firstJoin.write().mode(SaveMode.Overwrite).save(workingDirPath+"/scholix_1");
-        firstJoin = spark.read().load(workingDirPath+"/scholix_1").as(Encoders.bean(Scholix.class));
         Dataset<Scholix> scholix_final = spark.read().load(workingDirPath+"/scholix_1").as(Encoders.bean(Scholix.class));
+        scholixSummary
+                .map((MapFunction<ScholixSummary, ScholixResource>) ScholixResource::fromSummary, Encoders.bean(ScholixResource.class))
+                .repartition(1000)
+                .write()
+                .mode(SaveMode.Overwrite)
+                .save(workingDirPath+"/scholix_target");
         Dataset<ScholixResource> target = spark.read().load(workingDirPath+"/scholix_target").as(Encoders.bean(ScholixResource.class));
         scholix_final.joinWith(target, scholix_final.col("identifier").equalTo(target.col("dnetIdentifier")), "inner")
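Note: this hunk materializes both sides of the final join on disk and reads them back. Writing an intermediate Dataset and reloading it truncates the query lineage, so the expensive join starts from a plain scan instead of recomputing the upstream plan. A hypothetical helper (not in the commit) capturing that pattern:

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Encoders;
    import org.apache.spark.sql.SaveMode;
    import org.apache.spark.sql.SparkSession;

    public class DatasetCheckpointSketch {
        // Materialize a Dataset on disk (default parquet format), then
        // reload it with a fresh bean encoder to cut the lineage.
        public static <T> Dataset<T> checkpoint(SparkSession spark, Dataset<T> ds,
                                                String path, Class<T> clazz) {
            ds.write().mode(SaveMode.Overwrite).save(path);
            return spark.read().load(path).as(Encoders.bean(clazz));
        }
    }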
@@ -87,6 +76,9 @@ public class SparkGenerateScholix {
                     scholix.generateIdentifier();
                     scholix.generatelinkPublisher();
                     return scholix;
-                }, Encoders.bean(Scholix.class)).repartition(5000).write().mode(SaveMode.Overwrite).save(workingDirPath+"/scholix_index");
+                }, Encoders.kryo(Scholix.class)).javaRDD().map(s-> {
+                    ObjectMapper mapper = new ObjectMapper();
+                    return mapper.writeValueAsString(s);
+                }).saveAsTextFile(workingDirPath+"/scholix_json", GzipCodec.class);
     }
 }
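Note: this last hunk is the change the commit message describes: instead of saving the final Dataset as parquet under scholix_index, each Scholix is serialized to one JSON line with Jackson and written as gzip-compressed text under scholix_json. A sketch of the same stage; using mapPartitions is my assumption, so one ObjectMapper serves a whole partition instead of being allocated per record as in the committed lambda:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import eu.dnetlib.dhp.provision.scholix.Scholix;
    import org.apache.hadoop.io.compress.GzipCodec;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.sql.Dataset;

    import java.util.ArrayList;
    import java.util.List;

    public class ScholixJsonOutputSketch {
        // Serialize each Scholix to one JSON line and save as gzipped text files.
        public static void saveAsJson(Dataset<Scholix> scholix, String outputPath) {
            JavaRDD<String> json = scholix.toJavaRDD().mapPartitions(records -> {
                ObjectMapper mapper = new ObjectMapper();   // reused for the partition
                List<String> out = new ArrayList<>();
                while (records.hasNext())
                    out.add(mapper.writeValueAsString(records.next()));
                return out.iterator();
            });
            json.saveAsTextFile(outputPath, GzipCodec.class);
        }
    }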

workflow.xml

@@ -33,11 +33,9 @@
         <name>idSummary</name>
         <description>number of cores used by single executor</description>
     </property>
 </parameters>
-<start to="indexScholix"/>
+<start to="indexSummary"/>
 <kill name="Kill">
     <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
@@ -96,12 +94,12 @@
             <name>generate Scholix</name>
             <class>eu.dnetlib.dhp.provision.SparkGenerateScholix</class>
             <jar>dhp-graph-provision-${projectVersion}.jar</jar>
-            <spark-opts>--executor-memory 9G --driver-memory=${sparkDriverMemory} ${sparkExtraOPT}</spark-opts>
+            <spark-opts>--executor-memory 6G --driver-memory=${sparkDriverMemory} ${sparkExtraOPT}</spark-opts>
             <arg>-mt</arg> <arg>yarn-cluster</arg>
             <arg>--workingDirPath</arg><arg>${workingDirPath}</arg>
             <arg>--graphPath</arg><arg>${graphPath}</arg>
         </spark>
-        <ok to="indexScholix"/>
+        <ok to="End"/>
         <error to="Kill"/>
     </action>
@@ -111,7 +109,7 @@
         <name-node>${nameNode}</name-node>
         <master>yarn-cluster</master>
         <mode>cluster</mode>
-        <name>generate Summary</name>
+        <name>index Summary</name>
         <class>eu.dnetlib.dhp.provision.SparkIndexCollectionOnES</class>
         <jar>dhp-graph-provision-${projectVersion}.jar</jar>
         <spark-opts>--executor-memory ${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} ${sparkExtraOPT} --conf spark.dynamicAllocation.maxExecutors="64" </spark-opts>
@@ -134,7 +132,7 @@
             <name>index scholix</name>
             <class>eu.dnetlib.dhp.provision.SparkIndexCollectionOnES</class>
             <jar>dhp-graph-provision-${projectVersion}.jar</jar>
-            <spark-opts>--executor-memory ${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} ${sparkExtraOPT} --conf spark.dynamicAllocation.maxExecutors="16" </spark-opts>
+            <spark-opts>--executor-memory ${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} ${sparkExtraOPT} --conf spark.dynamicAllocation.maxExecutors="8" </spark-opts>
             <arg>-mt</arg> <arg>yarn-cluster</arg>
             <arg>--sourcePath</arg><arg>${workingDirPath}/scholix_json</arg>
             <arg>--index</arg><arg>${index}_scholix</arg>