opendoar partition

Michele Artini 2020-09-25 09:02:58 +02:00
parent 9e681609fd
commit c96598aaa4
5 changed files with 58 additions and 22 deletions

View File

@@ -0,0 +1,25 @@
+package eu.dnetlib.dhp.broker.model;
+
+import java.io.Serializable;
+
+import eu.dnetlib.broker.api.ShortEventMessage;
+
+public class ShortEventMessageWithGroupId extends ShortEventMessage implements Serializable {
+
+    /**
+     *
+     */
+    private static final long serialVersionUID = 4704889388757626630L;
+
+    private String group;
+
+    public String getGroup() {
+        return group;
+    }
+
+    public void setGroup(final String group) {
+        this.group = group;
+    }
+
+}
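
The new bean keeps strict getter/setter conventions because Spark's Encoders.bean maps each JavaBean property to a dataset column; that is what later lets the partition job call partitionBy("group"). A minimal sketch of that mechanic, assuming a local Spark session (the sample id is made up):

import java.util.Arrays;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import eu.dnetlib.dhp.broker.model.ShortEventMessageWithGroupId;

public class GroupColumnSketch {
    public static void main(final String[] args) {
        final SparkSession spark = SparkSession.builder().master("local[*]").appName("sketch").getOrCreate();

        final ShortEventMessageWithGroupId msg = new ShortEventMessageWithGroupId();
        msg.setGroup("1234"); // hypothetical OpenDOAR id, already stripped of its namespace prefix

        final Dataset<ShortEventMessageWithGroupId> ds = spark
            .createDataset(Arrays.asList(msg), Encoders.bean(ShortEventMessageWithGroupId.class));

        ds.printSchema(); // the schema now contains a "group" column alongside the inherited fields
        spark.stop();
    }
}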

View File

@@ -14,6 +14,7 @@ import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.spark.SparkConf;
 import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.TypedColumn;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -74,6 +75,7 @@ public class GenerateStatsJob {
             .agg(aggr)
             .map(t -> t._2, Encoders.bean(DatasourceStats.class))
             .write()
+            .mode(SaveMode.Overwrite)
             .jdbc(dbUrl, "oa_datasource_stats_temp", connectionProperties);

         log.info("*** updateStats");
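
The added .mode(SaveMode.Overwrite) matters on re-runs: DataFrameWriter defaults to ErrorIfExists, so a second execution would fail once oa_datasource_stats_temp already exists, while Overwrite recreates the table. A standalone sketch of the same kind of write, with assumed connection details (the URL, credentials, and single-row dataset are illustrative only):

import java.util.Properties;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

public class JdbcOverwriteSketch {
    public static void main(final String[] args) {
        final SparkSession spark = SparkSession.builder().master("local[*]").appName("sketch").getOrCreate();

        final Dataset<Row> stats = spark.sql("SELECT 'opendoar____::1234' AS datasource, 42L AS events");

        final Properties connectionProperties = new Properties();
        connectionProperties.put("user", "broker");     // assumed: wired from --dbUser
        connectionProperties.put("password", "secret"); // assumed: wired from --dbPassword

        stats
            .write()
            .mode(SaveMode.Overwrite) // replaces the table instead of failing when it already exists
            .jdbc("jdbc:postgresql://localhost:5432/broker", "oa_datasource_stats_temp", connectionProperties);

        spark.stop();
    }
}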

View File

@@ -20,12 +20,11 @@ import org.slf4j.LoggerFactory;
 import com.google.gson.Gson;

-import eu.dnetlib.broker.api.ShortEventMessage;
 import eu.dnetlib.broker.objects.OaBrokerEventPayload;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.broker.model.Event;
+import eu.dnetlib.dhp.broker.model.ShortEventMessageWithGroupId;
 import eu.dnetlib.dhp.broker.oa.util.ClusterUtils;
-import scala.Tuple2;

 public class PartitionEventsByDsIdJob {
@@ -61,13 +60,11 @@ public class PartitionEventsByDsIdJob {
             .readPath(spark, eventsPath, Event.class)
             .filter(e -> StringUtils.isNotBlank(e.getMap().getTargetDatasourceId()))
             .filter(e -> e.getMap().getTargetDatasourceId().contains(OPENDOAR_NSPREFIX))
-            .map(
-                e -> new Tuple2<>(
-                    StringUtils.substringAfter(e.getMap().getTargetDatasourceId(), OPENDOAR_NSPREFIX),
-                    messageFromNotification(e)),
-                Encoders.tuple(Encoders.STRING(), Encoders.bean(ShortEventMessage.class)))
+            .limit(10000)
+            .map(e -> messageFromNotification(e), Encoders.bean(ShortEventMessageWithGroupId.class))
+            .coalesce(1)
             .write()
-            .partitionBy("_1")
+            .partitionBy("group")
             .mode(SaveMode.Overwrite)
             .json(partitionPath);
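
The rewritten pipeline maps each event straight to a ShortEventMessageWithGroupId bean rather than a (key, message) tuple, caps the dump at 10,000 events, and coalesces to a single partition so each group directory holds one JSON file; partitionBy("group") then replaces the old partitionBy("_1") on the tuple key. A self-contained sketch of the resulting layout, with made-up ids and a throwaway output path:

import java.util.Arrays;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

public class PartitionLayoutSketch {
    public static void main(final String[] args) {
        final SparkSession spark = SparkSession.builder().master("local[*]").appName("sketch").getOrCreate();

        final Dataset<Row> events = spark
            .createDataset(Arrays.asList(
                new Tuple2<>("1234", "ENRICH/MISSING/PID"),
                new Tuple2<>("5678", "ENRICH/MORE/SUBJECT")),
                Encoders.tuple(Encoders.STRING(), Encoders.STRING()))
            .toDF("group", "topic");

        // Writes /tmp/eventsByOpendoarId/group=1234/part-*.json and .../group=5678/part-*.json;
        // the partition column is encoded in the directory name, not in the JSON records.
        events
            .coalesce(1)
            .write()
            .partitionBy("group")
            .mode(SaveMode.Overwrite)
            .json("/tmp/eventsByOpendoarId");

        spark.stop();
    }
}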
@@ -77,7 +74,6 @@ public class PartitionEventsByDsIdJob {
         }
     }

     private static void renameSubDirs(final String path) throws IOException {
-        final String prefix = "_1=";
         final FileSystem fs = FileSystem.get(new Configuration());

         log.info("** Renaming subdirs of " + path);
@@ -85,8 +81,8 @@
             if (fileStatus.isDirectory()) {
                 final Path oldPath = fileStatus.getPath();
                 final String oldName = oldPath.getName();
-                if (oldName.startsWith(prefix)) {
-                    final Path newPath = new Path(path + "/" + StringUtils.substringAfter(oldName, prefix));
+                if (oldName.contains("=")) {
+                    final Path newPath = new Path(path + "/" + StringUtils.substringAfter(oldName, "="));
                     log.info(" * " + oldPath.getName() + " -> " + newPath.getName());
                     fs.rename(oldPath, newPath);
                 }
@@ -94,18 +90,19 @@
         }
     }

-    private static ShortEventMessage messageFromNotification(final Event e) {
+    private static ShortEventMessageWithGroupId messageFromNotification(final Event e) {
         final Gson gson = new Gson();

         final OaBrokerEventPayload payload = gson.fromJson(e.getPayload(), OaBrokerEventPayload.class);

-        final ShortEventMessage res = new ShortEventMessage();
+        final ShortEventMessageWithGroupId res = new ShortEventMessageWithGroupId();

         res.setOriginalId(payload.getResult().getOriginalId());
         res.setTitle(payload.getResult().getTitles().stream().filter(StringUtils::isNotBlank).findFirst().orElse(null));
         res.setTopic(e.getTopic());
         res.setTrust(payload.getTrust());
         res.generateMessageFromObject(payload.getHighlight());
+        res.setGroup(StringUtils.substringAfter(e.getMap().getTargetDatasourceId(), OPENDOAR_NSPREFIX));

         return res;
     }
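
Because partitionBy writes directories named group=<id>, renameSubDirs now strips everything up to the "=" generically instead of matching the old "_1=" prefix, leaving plain OpenDOAR ids as folder names. A local-filesystem sketch of the same rename pass (paths and id are made up):

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class RenameSketch {
    public static void main(final String[] args) throws Exception {
        final String base = "/tmp/eventsByOpendoarId";
        final FileSystem fs = FileSystem.getLocal(new Configuration());

        fs.mkdirs(new Path(base + "/group=1234")); // as produced by partitionBy("group")

        for (final FileStatus fileStatus : fs.listStatus(new Path(base))) {
            final String oldName = fileStatus.getPath().getName();
            if (fileStatus.isDirectory() && oldName.contains("=")) {
                // group=1234 -> 1234
                fs.rename(fileStatus.getPath(), new Path(base + "/" + StringUtils.substringAfter(oldName, "=")));
            }
        }
    }
}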

View File

@@ -44,6 +44,18 @@
             <name>brokerApiBaseUrl</name>
             <description>the url of the broker service api</description>
         </property>
+        <property>
+            <name>brokerDbUrl</name>
+            <description>the url of the broker database</description>
+        </property>
+        <property>
+            <name>brokerDbUser</name>
+            <description>the user of the broker database</description>
+        </property>
+        <property>
+            <name>brokerDbPassword</name>
+            <description>the password of the broker database</description>
+        </property>
         <property>
             <name>sparkDriverMemory</name>
             <description>memory for driver process</description>
@@ -509,8 +521,11 @@
                 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                 --conf spark.sql.shuffle.partitions=3840
             </spark-opts>
-            <arg>--graphPath</arg><arg>${graphInputPath}</arg>
             <arg>--workingPath</arg><arg>${workingPath}</arg>
+            <arg>--dbUrl</arg><arg>${brokerDbUrl}</arg>
+            <arg>--dbUser</arg><arg>${brokerDbUser}</arg>
+            <arg>--dbPassword</arg><arg>${brokerDbPassword}</arg>
+            <arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
         </spark>
         <ok to="End"/>
         <error to="Kill"/>

View File

@@ -111,18 +111,18 @@
         </configuration>
     </global>

-    <start to="stats"/>
+    <start to="partition"/>

     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>

-    <action name="stats">
+    <action name="partition">
         <spark xmlns="uri:oozie:spark-action:0.2">
             <master>yarn</master>
             <mode>cluster</mode>
-            <name>GenerateStatsJob</name>
-            <class>eu.dnetlib.dhp.broker.oa.GenerateStatsJob</class>
+            <name>PartitionEventsByDsIdJob</name>
+            <class>eu.dnetlib.dhp.broker.oa.PartitionEventsByDsIdJob</class>
             <jar>dhp-broker-events-${projectVersion}.jar</jar>
             <spark-opts>
                 --executor-cores=${sparkExecutorCores}
@@ -134,11 +134,8 @@
                 --conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
                 --conf spark.sql.shuffle.partitions=3840
             </spark-opts>
+            <arg>--graphPath</arg><arg>${graphInputPath}</arg>
             <arg>--workingPath</arg><arg>${workingPath}</arg>
-            <arg>--dbUrl</arg><arg>${brokerDbUrl}</arg>
-            <arg>--dbUser</arg><arg>${brokerDbUser}</arg>
-            <arg>--dbPassword</arg><arg>${brokerDbPassword}</arg>
-            <arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
         </spark>
         <ok to="End"/>
         <error to="Kill"/>