forked from D-Net/dnet-hadoop

commit 7b6a7333e6: merge branch with master
@@ -0,0 +1,25 @@
+package eu.dnetlib.dhp.broker.model;
+
+import java.io.Serializable;
+
+import eu.dnetlib.broker.api.ShortEventMessage;
+
+public class ShortEventMessageWithGroupId extends ShortEventMessage implements Serializable {
+
+	/**
+	 *
+	 */
+	private static final long serialVersionUID = 4704889388757626630L;
+
+	private String group;
+
+	public String getGroup() {
+		return group;
+	}
+
+	public void setGroup(final String group) {
+		this.group = group;
+	}
+
+}
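The new bean exists so that the OpenDOAR group can travel as an ordinary bean property: both Spark's Encoders.bean and the JSON output pick it up through the getter/setter pair. A minimal serialization sketch (the surrounding class and values are hypothetical; it assumes the dnet broker API, which provides the ShortEventMessage base class, is on the classpath):

import com.google.gson.Gson;

import eu.dnetlib.dhp.broker.model.ShortEventMessageWithGroupId;

public class GroupSerializationSketch {
	public static void main(final String[] args) {
		final ShortEventMessageWithGroupId m = new ShortEventMessageWithGroupId();
		m.setGroup("1234"); // hypothetical OpenDOAR id
		// the inherited fields plus "group" end up in the JSON document
		System.out.println(new Gson().toJson(m));
	}
}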
@@ -14,6 +14,7 @@ import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.spark.SparkConf;
 import org.apache.spark.sql.Encoders;
+import org.apache.spark.sql.SaveMode;
 import org.apache.spark.sql.TypedColumn;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -74,6 +75,7 @@ public class GenerateStatsJob {
 			.agg(aggr)
 			.map(t -> t._2, Encoders.bean(DatasourceStats.class))
 			.write()
+			.mode(SaveMode.Overwrite)
 			.jdbc(dbUrl, "oa_datasource_stats_temp", connectionProperties);

 		log.info("*** updateStats");
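A note on the added SaveMode.Overwrite: Spark's DataFrameWriter defaults to SaveMode.ErrorIfExists, so without an explicit mode a second run of GenerateStatsJob would fail once oa_datasource_stats_temp already exists in the broker database. A minimal sketch of the same write pattern, with a hypothetical session, query, and connection details:

import java.util.Properties;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

public class JdbcOverwriteSketch {
	public static void main(final String[] args) {
		final SparkSession spark = SparkSession.builder().appName("sketch").master("local[*]").getOrCreate();
		final Dataset<Row> stats = spark.sql("SELECT 'ds1' AS datasource, 42 AS total");

		final Properties props = new Properties();
		props.setProperty("user", "broker");  // hypothetical credentials
		props.setProperty("password", "secret");

		stats
			.write()
			.mode(SaveMode.Overwrite) // drops and recreates the table on each run
			.jdbc("jdbc:postgresql://localhost:5432/broker", "oa_datasource_stats_temp", props);
		// requires a JDBC driver (e.g. PostgreSQL) on the classpath
	}
}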
@@ -20,12 +20,11 @@ import org.slf4j.LoggerFactory;

 import com.google.gson.Gson;

-import eu.dnetlib.broker.api.ShortEventMessage;
 import eu.dnetlib.broker.objects.OaBrokerEventPayload;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.broker.model.Event;
+import eu.dnetlib.dhp.broker.model.ShortEventMessageWithGroupId;
 import eu.dnetlib.dhp.broker.oa.util.ClusterUtils;
-import scala.Tuple2;

 public class PartitionEventsByDsIdJob {

@@ -61,13 +60,11 @@ public class PartitionEventsByDsIdJob {
 			.readPath(spark, eventsPath, Event.class)
 			.filter(e -> StringUtils.isNotBlank(e.getMap().getTargetDatasourceId()))
 			.filter(e -> e.getMap().getTargetDatasourceId().contains(OPENDOAR_NSPREFIX))
-			.map(
-				e -> new Tuple2<>(
-					StringUtils.substringAfter(e.getMap().getTargetDatasourceId(), OPENDOAR_NSPREFIX),
-					messageFromNotification(e)),
-				Encoders.tuple(Encoders.STRING(), Encoders.bean(ShortEventMessage.class)))
+			.limit(10000)
+			.map(e -> messageFromNotification(e), Encoders.bean(ShortEventMessageWithGroupId.class))
+			.coalesce(1)
 			.write()
-			.partitionBy("_1")
+			.partitionBy("group")
 			.mode(SaveMode.Overwrite)
 			.json(partitionPath);

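The rewrite works because DataFrameWriter.partitionBy can only reference a column of the dataset being written: moving the group out of the Tuple2 key (column "_1") and into a bean property lets Encoders.bean expose it as a regular "group" column, populated in messageFromNotification below. A minimal sketch of the resulting layout (session and paths are hypothetical; it assumes the bean class is on the classpath):

import java.util.Arrays;

import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

import eu.dnetlib.dhp.broker.model.ShortEventMessageWithGroupId;

public class PartitionByGroupSketch {
	public static void main(final String[] args) {
		final SparkSession spark = SparkSession.builder().appName("sketch").master("local[*]").getOrCreate();

		final ShortEventMessageWithGroupId m = new ShortEventMessageWithGroupId();
		m.setGroup("1234"); // hypothetical id extracted from the datasource id

		spark
			.createDataset(Arrays.asList(m), Encoders.bean(ShortEventMessageWithGroupId.class))
			.coalesce(1)          // a single json file per partition directory
			.write()
			.partitionBy("group") // creates subdirs named group=1234, group=..., etc.
			.mode(SaveMode.Overwrite)
			.json("/tmp/events"); // hypothetical output path
	}
}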
@@ -77,7 +74,6 @@ public class PartitionEventsByDsIdJob {
 	}

 	private static void renameSubDirs(final String path) throws IOException {
-		final String prefix = "_1=";
 		final FileSystem fs = FileSystem.get(new Configuration());

 		log.info("** Renaming subdirs of " + path);
@@ -85,8 +81,8 @@ public class PartitionEventsByDsIdJob {
 			if (fileStatus.isDirectory()) {
 				final Path oldPath = fileStatus.getPath();
 				final String oldName = oldPath.getName();
-				if (oldName.startsWith(prefix)) {
-					final Path newPath = new Path(path + "/" + StringUtils.substringAfter(oldName, prefix));
+				if (oldName.contains("=")) {
+					final Path newPath = new Path(path + "/" + StringUtils.substringAfter(oldName, "="));
 					log.info(" * " + oldPath.getName() + " -> " + newPath.getName());
 					fs.rename(oldPath, newPath);
 				}
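renameSubDirs compensates for Spark's partition directory naming, which is always column=value; matching any "=" instead of the hard-coded "_1=" prefix keeps the rename working now that the partition column is "group". A small sketch of the string handling (the directory name is hypothetical; StringUtils is Apache Commons Lang):

import org.apache.commons.lang3.StringUtils;

public class RenameSketch {
	public static void main(final String[] args) {
		// Spark writes one subdir per partition value, e.g. "group=1234";
		// the job renames it to the bare value.
		final String oldName = "group=1234";
		if (oldName.contains("=")) {
			System.out.println(StringUtils.substringAfter(oldName, "=")); // prints 1234
		}
	}
}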
@@ -94,18 +90,19 @@ public class PartitionEventsByDsIdJob {
 		}
 	}

-	private static ShortEventMessage messageFromNotification(final Event e) {
+	private static ShortEventMessageWithGroupId messageFromNotification(final Event e) {
 		final Gson gson = new Gson();

 		final OaBrokerEventPayload payload = gson.fromJson(e.getPayload(), OaBrokerEventPayload.class);

-		final ShortEventMessage res = new ShortEventMessage();
+		final ShortEventMessageWithGroupId res = new ShortEventMessageWithGroupId();

 		res.setOriginalId(payload.getResult().getOriginalId());
 		res.setTitle(payload.getResult().getTitles().stream().filter(StringUtils::isNotBlank).findFirst().orElse(null));
 		res.setTopic(e.getTopic());
 		res.setTrust(payload.getTrust());
 		res.generateMessageFromObject(payload.getHighlight());
+		res.setGroup(StringUtils.substringAfter(e.getMap().getTargetDatasourceId(), OPENDOAR_NSPREFIX));

 		return res;
 	}
@@ -44,6 +44,18 @@
 			<name>brokerApiBaseUrl</name>
 			<description>the url of the broker service api</description>
 		</property>
+		<property>
+			<name>brokerDbUrl</name>
+			<description>the url of the broker database</description>
+		</property>
+		<property>
+			<name>brokerDbUser</name>
+			<description>the user of the broker database</description>
+		</property>
+		<property>
+			<name>brokerDbPassword</name>
+			<description>the password of the broker database</description>
+		</property>
 		<property>
 			<name>sparkDriverMemory</name>
 			<description>memory for driver process</description>
@@ -509,8 +521,11 @@
 				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 				--conf spark.sql.shuffle.partitions=3840
 			</spark-opts>
-			<arg>--graphPath</arg><arg>${graphInputPath}</arg>
 			<arg>--workingPath</arg><arg>${workingPath}</arg>
+			<arg>--dbUrl</arg><arg>${brokerDbUrl}</arg>
+			<arg>--dbUser</arg><arg>${brokerDbUser}</arg>
+			<arg>--dbPassword</arg><arg>${brokerDbPassword}</arg>
+			<arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
 		</spark>
 		<ok to="End"/>
 		<error to="Kill"/>
@@ -111,18 +111,18 @@
 		</configuration>
 	</global>

-	<start to="stats"/>
+	<start to="partition"/>

 	<kill name="Kill">
 		<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
 	</kill>

-	<action name="stats">
+	<action name="partition">
 		<spark xmlns="uri:oozie:spark-action:0.2">
 			<master>yarn</master>
 			<mode>cluster</mode>
-			<name>GenerateStatsJob</name>
-			<class>eu.dnetlib.dhp.broker.oa.GenerateStatsJob</class>
+			<name>PartitionEventsByDsIdJob</name>
+			<class>eu.dnetlib.dhp.broker.oa.PartitionEventsByDsIdJob</class>
 			<jar>dhp-broker-events-${projectVersion}.jar</jar>
 			<spark-opts>
 				--executor-cores=${sparkExecutorCores}
@@ -134,11 +134,8 @@
 				--conf spark.eventLog.dir=${nameNode}${spark2EventLogDir}
 				--conf spark.sql.shuffle.partitions=3840
 			</spark-opts>
+			<arg>--graphPath</arg><arg>${graphInputPath}</arg>
 			<arg>--workingPath</arg><arg>${workingPath}</arg>
-			<arg>--dbUrl</arg><arg>${brokerDbUrl}</arg>
-			<arg>--dbUser</arg><arg>${brokerDbUser}</arg>
-			<arg>--dbPassword</arg><arg>${brokerDbPassword}</arg>
-			<arg>--brokerApiBaseUrl</arg><arg>${brokerApiBaseUrl}</arg>
 		</spark>
 		<ok to="End"/>
 		<error to="Kill"/>
@@ -577,7 +577,7 @@ public class MigrateDbEntitiesApplication extends AbstractMigrationApplication i
 		final String lissn = StringUtils.isNotBlank(arr[2]) ? arr[2].trim() : null;

 		if (issn != null || eissn != null || lissn != null) {
-			return journal(name, issn, eissn, eissn, null, null, null, null, null, null, null, info);
+			return journal(name, issn, eissn, lissn, null, null, null, null, null, null, null, info);
 		}
 	}
 }
@@ -85,7 +85,7 @@ SELECT
 	dc.officialname AS collectedfromname,
 	d.typology||'@@@dnet:datasource_typologies' AS datasourcetype,
 	'sysimport:crosswalk:entityregistry@@@dnet:provenance_actions' AS provenanceaction,
-	d.issn || ' @@@ ' || d.eissn || ' @@@ ' || d.lissn AS journal
+	concat_ws(' @@@ ', d.issn, d.eissn, d.lissn) AS journal

 FROM dsm_datasources d
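The SQL change matters because in PostgreSQL the || operator returns NULL as soon as any operand is NULL, so a single missing ISSN variant used to wipe out the whole journal column; concat_ws skips NULL arguments and joins the remaining ones with the separator. A minimal Java sketch of the same null-skipping semantics (helper name is hypothetical; ISSN values are from the test fixture below):

import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ConcatWsSketch {
	// joins the non-null values with the separator, like SQL concat_ws
	static String concatWs(final String sep, final String... values) {
		return Stream.of(values).filter(Objects::nonNull).collect(Collectors.joining(sep));
	}

	public static void main(final String[] args) {
		// prints "2579-5449 @@@ 2597-6540": a NULL lissn no longer nulls the whole expression
		System.out.println(concatWs(" @@@ ", "2579-5449", "2597-6540", null));
	}
}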
@@ -73,12 +73,16 @@ public class MigrateDbEntitiesApplicationTest {
 		final Datasource ds = (Datasource) list.get(0);
 		assertValidId(ds.getId());
 		assertValidId(ds.getCollectedfrom().get(0).getKey());
-		assertEquals(ds.getOfficialname().getValue(), getValueAsString("officialname", fields));
-		assertEquals(ds.getEnglishname().getValue(), getValueAsString("englishname", fields));
-		assertEquals(ds.getContactemail().getValue(), getValueAsString("contactemail", fields));
-		assertEquals(ds.getWebsiteurl().getValue(), getValueAsString("websiteurl", fields));
-		assertEquals(ds.getNamespaceprefix().getValue(), getValueAsString("namespaceprefix", fields));
-		assertEquals(ds.getCollectedfrom().get(0).getValue(), getValueAsString("collectedfromname", fields));
+		assertEquals(getValueAsString("officialname", fields), ds.getOfficialname().getValue());
+		assertEquals(getValueAsString("englishname", fields), ds.getEnglishname().getValue());
+		assertEquals(getValueAsString("contactemail", fields), ds.getContactemail().getValue());
+		assertEquals(getValueAsString("websiteurl", fields), ds.getWebsiteurl().getValue());
+		assertEquals(getValueAsString("namespaceprefix", fields), ds.getNamespaceprefix().getValue());
+		assertEquals(getValueAsString("collectedfromname", fields), ds.getCollectedfrom().get(0).getValue());
+		assertEquals(getValueAsString("officialname", fields), ds.getJournal().getName());
+		assertEquals("2579-5449", ds.getJournal().getIssnPrinted());
+		assertEquals("2597-6540", ds.getJournal().getIssnOnline());
+		assertEquals(null, ds.getJournal().getIssnLinking());
 	}

 	@Test
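The swapped assertEquals arguments throughout these tests follow JUnit's assertEquals(expected, actual) contract: the failure message reads "expected: <X> but was: <Y>", which is only truthful when the fixture value comes first, e.g.:

// expected value from the fixture first, actual entity value second
assertEquals(getValueAsString("officialname", fields), ds.getOfficialname().getValue());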
@@ -92,9 +96,11 @@ public class MigrateDbEntitiesApplicationTest {
 		final Project p = (Project) list.get(0);
 		assertValidId(p.getId());
 		assertValidId(p.getCollectedfrom().get(0).getKey());
-		assertEquals(p.getAcronym().getValue(), getValueAsString("acronym", fields));
-		assertEquals(p.getTitle().getValue(), getValueAsString("title", fields));
-		assertEquals(p.getCollectedfrom().get(0).getValue(), getValueAsString("collectedfromname", fields));
+		assertEquals(getValueAsString("acronym", fields), p.getAcronym().getValue());
+		assertEquals(getValueAsString("title", fields), p.getTitle().getValue());
+		assertEquals(getValueAsString("collectedfromname", fields), p.getCollectedfrom().get(0).getValue());
+		assertEquals(getValueAsFloat("fundedamount", fields), p.getFundedamount());
+		assertEquals(getValueAsFloat("totalcost", fields), p.getTotalcost());
 	}

 	@Test
@@ -110,14 +116,14 @@ public class MigrateDbEntitiesApplicationTest {
 		final Organization o = (Organization) list.get(0);
 		assertValidId(o.getId());
 		assertValidId(o.getCollectedfrom().get(0).getKey());
-		assertEquals(o.getLegalshortname().getValue(), getValueAsString("legalshortname", fields));
-		assertEquals(o.getLegalname().getValue(), getValueAsString("legalname", fields));
-		assertEquals(o.getWebsiteurl().getValue(), getValueAsString("websiteurl", fields));
-		assertEquals(o.getCountry().getClassid(), getValueAsString("country", fields).split("@@@")[0]);
-		assertEquals(o.getCountry().getClassname(), getValueAsString("country", fields).split("@@@")[0]);
-		assertEquals(o.getCountry().getSchemeid(), getValueAsString("country", fields).split("@@@")[1]);
-		assertEquals(o.getCountry().getSchemename(), getValueAsString("country", fields).split("@@@")[1]);
-		assertEquals(o.getCollectedfrom().get(0).getValue(), getValueAsString("collectedfromname", fields));
+		assertEquals(getValueAsString("legalshortname", fields), o.getLegalshortname().getValue());
+		assertEquals(getValueAsString("legalname", fields), o.getLegalname().getValue());
+		assertEquals(getValueAsString("websiteurl", fields), o.getWebsiteurl().getValue());
+		assertEquals(getValueAsString("country", fields).split("@@@")[0], o.getCountry().getClassid());
+		assertEquals(getValueAsString("country", fields).split("@@@")[0], o.getCountry().getClassname());
+		assertEquals(getValueAsString("country", fields).split("@@@")[1], o.getCountry().getSchemeid());
+		assertEquals(getValueAsString("country", fields).split("@@@")[1], o.getCountry().getSchemename());
+		assertEquals(getValueAsString("collectedfromname", fields), o.getCollectedfrom().get(0).getValue());
 	}

 	@Test
@@ -322,12 +328,20 @@
 	}

 	private String getValueAsString(final String name, final List<TypedField> fields) {
+		return getValueAs(name, fields);
+	}
+
+	private Float getValueAsFloat(final String name, final List<TypedField> fields) {
+		return new Float(getValueAs(name, fields).toString());
+	}
+
+	private <T> T getValueAs(final String name, final List<TypedField> fields) {
 		return fields
 			.stream()
 			.filter(f -> f.getField().equals(name))
 			.map(TypedField::getValue)
 			.filter(Objects::nonNull)
-			.map(o -> o.toString())
+			.map(o -> (T) o)
 			.findFirst()
 			.get();
 	}
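In the new generic helper, T is inferred at each call site and the (T) cast is unchecked, so a mismatch only surfaces as a ClassCastException when the value is used; the fixture stores the amounts as numbers rather than strings, which is presumably why getValueAsFloat converts through toString() instead of casting directly to Float. Hypothetical call sites:

final String officialname = getValueAsString("officialname", fields); // delegates with T = String
final Float totalcost = getValueAsFloat("totalcost", fields);          // numeric fixture value, parsed via toString()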
@@ -142,12 +142,12 @@
 	{
 		"field": "totalcost",
 		"type": "double",
-		"value": null
+		"value": 157846
 	},
 	{
 		"field": "fundedamount",
 		"type": "double",
-		"value": null
+		"value": 157846
 	},
 	{
 		"field": "collectedfromid",