removed indicators from Instance
parent 71862838b0
commit 2d2b62386f
@@ -26,8 +26,8 @@ import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;
 */
 public class Instance implements Serializable {

-@JsonSchema(description = "Indicators computed for this instance, for example Bip!Finder ones")
-private Indicator indicators;
+// @JsonSchema(description = "Indicators computed for this instance, for example Bip!Finder ones")
+// private Indicator indicators;

 private List<ResultPid> pid;

@@ -141,12 +141,12 @@ public class Instance implements Serializable {
 this.alternateIdentifier = alternateIdentifier;
 }

-@JsonInclude(JsonInclude.Include.NON_NULL)
-public Indicator getIndicators() {
-return indicators;
-}
-
-public void setIndicators(Indicator indicators) {
-this.indicators = indicators;
-}
+// @JsonInclude(JsonInclude.Include.NON_NULL)
+// public Indicator getIndicators() {
+// return indicators;
+// }
+//
+// public void setIndicators(Indicator indicators) {
+// this.indicators = indicators;
+// }
 }

@@ -548,7 +548,7 @@ public class ResultMapper implements Serializable {
 Constants.COAR_ACCESS_RIGHT_SCHEMA));

 Optional<List<eu.dnetlib.dhp.schema.oaf.Measure>> mes = Optional.ofNullable(i.getMeasures());
-if (mes.isPresent()) {
+// if (mes.isPresent()) {
 // List<Indicator> indicators = new ArrayList<>();
 // mes
 // .get()

@@ -563,8 +563,8 @@ public class ResultMapper implements Serializable {
 // .stream()
 // .map(u -> Measure.newInstance(u.getKey(), u.getValue()))
 // .collect(Collectors.toList()))));
-instance.setIndicators(getIndicator(mes.get()));
-}
+// instance.setIndicators(getIndicator(mes.get()));
+// }

 if (opAr.get().getOpenAccessRoute() != null) {
 switch (opAr.get().getOpenAccessRoute()) {

@@ -112,7 +112,7 @@ public class SparkSelectValidRelationContext implements Serializable {
 Encoders.tuple(Encoders.STRING(), Encoders.bean(Relation.class)));

 Dataset<ResearchCommunity> allowedContext = Utils
-.readPath(spark, inputPath + "/community_infrastructure", ResearchCommunity.class);
+.readPath(spark, inputPath + "/communities_infrastructures", ResearchCommunity.class);

 Dataset<Tuple2<String, Relation>> relJoinSource = relationSource
 .joinWith(dumpedIds, relationSource.col("_1").equalTo(dumpedIds.col("value")))

@@ -65,7 +65,6 @@
 <name>organizationCommunityMap</name>
 <description>the organization community map</description>
 </property>
-
 <property>
 <name>hiveDbName</name>
 <description>the target hive database name</description>

@@ -134,8 +133,8 @@
 </configuration>
 </global>

-<start to="only_upload"/>
-
+<!-- <start to="only_upload"/>-->
+<start to="make_archive"/>
 <kill name="Kill">
 <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
 </kill>

@@ -215,7 +214,7 @@
 </property>
 </configuration>
 </sub-workflow>
-<ok to="End" />
+<ok to="make_archive" />
 <error to="Kill" />
 </action>

@@ -309,12 +308,23 @@
 <error to="Kill" />
 </action>

+<!-- <action name="make_archive">-->
+<!-- <java>-->
+<!-- <main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>-->
+<!-- <arg>--hdfsPath</arg><arg>${outputPath}</arg>-->
+<!-- <arg>--nameNode</arg><arg>${nameNode}</arg>-->
+<!-- <arg>--sourcePath</arg><arg>${workingDir}/tar</arg>-->
+<!-- </java>-->
+<!-- <ok to="should_upload"/>-->
+<!-- <error to="Kill"/>-->
+<!-- </action>-->
+
 <action name="make_archive">
 <java>
 <main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>
-<arg>--hdfsPath</arg><arg>${outputPath}</arg>
+<arg>--hdfsPath</arg><arg>${outputPath}/tar</arg>
 <arg>--nameNode</arg><arg>${nameNode}</arg>
-<arg>--sourcePath</arg><arg>${workingDir}/tar</arg>
+<arg>--sourcePath</arg><arg>${outputPath}/dump</arg>
 </java>
 <ok to="should_upload"/>
 <error to="Kill"/>

@@ -330,7 +340,8 @@
 <action name="send_zenodo">
 <java>
 <main-class>eu.dnetlib.dhp.oa.graph.dump.SendToZenodoHDFS</main-class>
-<arg>--hdfsPath</arg><arg>${outputPath}</arg>
+<!-- <arg>--hdfsPath</arg><arg>${outputPath}</arg>-->
+<arg>--hdfsPath</arg><arg>${outputPath}/tar</arg>
 <arg>--nameNode</arg><arg>${nameNode}</arg>
 <arg>--accessToken</arg><arg>${accessToken}</arg>
 <arg>--connectionUrl</arg><arg>${connectionUrl}</arg>

@@ -438,10 +438,10 @@ public class DumpJobTest {
 Assertions.assertEquals("2017-01-01", instance.getPublicationdate());
 Assertions.assertEquals(null, instance.getArticleprocessingcharge());
 Assertions.assertEquals("peerReviewed", instance.getRefereed());
-Indicator indicator = instance.getIndicators();
-
-Assertions.assertFalse(Optional.ofNullable(indicator.getUsageCounts()).isPresent());
-Assertions.assertTrue(Optional.ofNullable(indicator.getImpactMeasures()).isPresent());
+// Indicator indicator = instance.getIndicators();
+//
+// Assertions.assertFalse(Optional.ofNullable(indicator.getUsageCounts()).isPresent());
+// Assertions.assertTrue(Optional.ofNullable(indicator.getImpactMeasures()).isPresent());
 }

 @Test