removed indicators from Instance

parent 71862838b0
commit 2d2b62386f
@@ -26,8 +26,8 @@ import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;
  */
 public class Instance implements Serializable {
 
-    @JsonSchema(description = "Indicators computed for this instance, for example Bip!Finder ones")
-    private Indicator indicators;
+    // @JsonSchema(description = "Indicators computed for this instance, for example Bip!Finder ones")
+    // private Indicator indicators;
 
     private List<ResultPid> pid;
 
@@ -141,12 +141,12 @@ public class Instance implements Serializable {
         this.alternateIdentifier = alternateIdentifier;
     }
 
-    @JsonInclude(JsonInclude.Include.NON_NULL)
-    public Indicator getIndicators() {
-        return indicators;
-    }
-
-    public void setIndicators(Indicator indicators) {
-        this.indicators = indicators;
-    }
+    // @JsonInclude(JsonInclude.Include.NON_NULL)
+    // public Indicator getIndicators() {
+    //     return indicators;
+    // }
+    //
+    // public void setIndicators(Indicator indicators) {
+    //     this.indicators = indicators;
+    // }
 }
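
With the field and its accessors commented out, Jackson simply stops emitting an "indicators" key when an Instance is serialized. A minimal sketch of that behaviour, using a hypothetical stand-in bean rather than the real Instance:

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;

// Simplified stand-in for the dumped Instance bean; field names are illustrative.
class InstanceSketch {

    private final String refereed = "peerReviewed";

    // With @JsonInclude(Include.NON_NULL) a key is dropped only when its value
    // is null; removing the field and getter drops it unconditionally.
    @JsonInclude(JsonInclude.Include.NON_NULL)
    public String getRefereed() {
        return refereed;
    }
}

public class SerializationDemo {
    public static void main(String[] args) throws Exception {
        // Prints {"refereed":"peerReviewed"}; since no indicators accessor
        // exists any more, an "indicators" key can never appear.
        System.out.println(new ObjectMapper().writeValueAsString(new InstanceSketch()));
    }
}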

@@ -548,7 +548,7 @@ public class ResultMapper implements Serializable {
                     Constants.COAR_ACCESS_RIGHT_SCHEMA));
 
             Optional<List<eu.dnetlib.dhp.schema.oaf.Measure>> mes = Optional.ofNullable(i.getMeasures());
-            if (mes.isPresent()) {
+            // if (mes.isPresent()) {
             // List<Indicator> indicators = new ArrayList<>();
             // mes
             //     .get()

@@ -563,8 +563,8 @@ public class ResultMapper implements Serializable {
             //     .stream()
             //     .map(u -> Measure.newInstance(u.getKey(), u.getValue()))
             //     .collect(Collectors.toList()))));
-            instance.setIndicators(getIndicator(mes.get()));
-            }
+            // instance.setIndicators(getIndicator(mes.get()));
+            // }
 
             if (opAr.get().getOpenAccessRoute() != null) {
                 switch (opAr.get().getOpenAccessRoute()) {
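
Before this change the mapper guarded the indicator computation with an Optional around the OAF measures; the commit comments out that whole path. A sketch of the disabled pattern, with hypothetical stand-in types and an assumed signature for the project's getIndicator converter:

import java.util.List;
import java.util.Optional;

// Hypothetical stand-ins for eu.dnetlib.dhp.schema.oaf.Measure, the dump
// Indicator model and the mapped instance.
class Measure {}

class Indicator {}

class DumpInstance {
    void setIndicators(Indicator indicators) { /* store */ }
}

public class IndicatorMapping {

    // Assumed converter from graph measures to the dump indicator model.
    static Indicator getIndicator(List<Measure> measures) {
        return new Indicator();
    }

    static void map(List<Measure> measures, DumpInstance instance) {
        // Optional.ofNullable keeps a null measures list from triggering the
        // conversion; this is the guard the commit switches off.
        Optional.ofNullable(measures)
            .ifPresent(mes -> instance.setIndicators(getIndicator(mes)));
    }
}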
@ -112,7 +112,7 @@ public class SparkSelectValidRelationContext implements Serializable {
|
|||
Encoders.tuple(Encoders.STRING(), Encoders.bean(Relation.class)));
|
||||
|
||||
Dataset<ResearchCommunity> allowedContext = Utils
|
||||
.readPath(spark, inputPath + "/community_infrastructure", ResearchCommunity.class);
|
||||
.readPath(spark, inputPath + "/communities_infrastructures", ResearchCommunity.class);
|
||||
|
||||
Dataset<Tuple2<String, Relation>> relJoinSource = relationSource
|
||||
.joinWith(dumpedIds, relationSource.col("_1").equalTo(dumpedIds.col("value")))
|
||||
|
|
|
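
This hunk fixes a path typo: the community/infrastructure dump is read from communities_infrastructures, the directory the dump actually writes. Utils.readPath is the project's helper; one plausible shape for it, assuming the dump is stored as newline-delimited JSON, is:

import com.fasterxml.jackson.databind.ObjectMapper;

import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

public class ReadPathSketch {

    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    // Reads newline-delimited JSON into a typed Dataset; this mirrors a common
    // Spark pattern, but the real Utils.readPath may differ in detail.
    public static <R> Dataset<R> readPath(SparkSession spark, String path, Class<R> clazz) {
        return spark
            .read()
            .textFile(path)
            .map(
                (MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz),
                Encoders.bean(clazz));
    }
}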

@@ -65,7 +65,6 @@
         <name>organizationCommunityMap</name>
         <description>the organization community map</description>
     </property>
-
     <property>
         <name>hiveDbName</name>
         <description>the target hive database name</description>

@@ -134,8 +133,8 @@
     </configuration>
     </global>
 
-    <start to="only_upload"/>
-
+    <!-- <start to="only_upload"/>-->
+    <start to="make_archive"/>
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>

@@ -215,7 +214,7 @@
         </property>
     </configuration>
     </sub-workflow>
-    <ok to="End" />
+    <ok to="make_archive" />
     <error to="Kill" />
 </action>
 
@@ -309,12 +308,23 @@
     <error to="Kill" />
 </action>
 
+    <!-- <action name="make_archive">-->
+    <!-- <java>-->
+    <!-- <main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>-->
+    <!-- <arg>--hdfsPath</arg><arg>${outputPath}</arg>-->
+    <!-- <arg>--nameNode</arg><arg>${nameNode}</arg>-->
+    <!-- <arg>--sourcePath</arg><arg>${workingDir}/tar</arg>-->
+    <!-- </java>-->
+    <!-- <ok to="should_upload"/>-->
+    <!-- <error to="Kill"/>-->
+    <!-- </action>-->
+
 <action name="make_archive">
     <java>
         <main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>
-        <arg>--hdfsPath</arg><arg>${outputPath}</arg>
+        <arg>--hdfsPath</arg><arg>${outputPath}/tar</arg>
         <arg>--nameNode</arg><arg>${nameNode}</arg>
-        <arg>--sourcePath</arg><arg>${workingDir}/tar</arg>
+        <arg>--sourcePath</arg><arg>${outputPath}/dump</arg>
     </java>
     <ok to="should_upload"/>
     <error to="Kill"/>
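
The rewritten make_archive action now tars the dump in place under ${outputPath}: the source is ${outputPath}/dump and the archive lands in ${outputPath}/tar, no longer passing through ${workingDir}. MakeTar is the project's own class; the core of such a step is ordinary Hadoop I/O plus commons-compress, roughly as in this sketch (method name and layout are assumptions, and the real class likely also handles subdirectories and archive splitting):

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MakeTarSketch {

    // Tars every file directly under sourcePath into a single archive on HDFS.
    public static void tarDirectory(FileSystem fs, String sourcePath, String targetPath) throws Exception {
        try (TarArchiveOutputStream tar = new TarArchiveOutputStream(fs.create(new Path(targetPath), true))) {
            for (FileStatus status : fs.listStatus(new Path(sourcePath))) {
                if (status.isFile()) {
                    // One tar entry per HDFS file, streamed without local copies.
                    TarArchiveEntry entry = new TarArchiveEntry(status.getPath().getName());
                    entry.setSize(status.getLen());
                    tar.putArchiveEntry(entry);
                    try (FSDataInputStream in = fs.open(status.getPath())) {
                        IOUtils.copy(in, tar);
                    }
                    tar.closeArchiveEntry();
                }
            }
        }
    }

    public static void main(String[] args) throws Exception {
        // Illustrative paths mirroring the workflow's new layout.
        FileSystem fs = FileSystem.get(new Configuration());
        tarDirectory(fs, "/outputPath/dump", "/outputPath/tar/dump.tar");
    }
}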

@@ -330,7 +340,8 @@
 <action name="send_zenodo">
     <java>
         <main-class>eu.dnetlib.dhp.oa.graph.dump.SendToZenodoHDFS</main-class>
-        <arg>--hdfsPath</arg><arg>${outputPath}</arg>
+        <!-- <arg>--hdfsPath</arg><arg>${outputPath}</arg>-->
+        <arg>--hdfsPath</arg><arg>${outputPath}/tar</arg>
         <arg>--nameNode</arg><arg>${nameNode}</arg>
         <arg>--accessToken</arg><arg>${accessToken}</arg>
         <arg>--connectionUrl</arg><arg>${connectionUrl}</arg>
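
Consistently, send_zenodo is pointed at ${outputPath}/tar so it uploads exactly what make_archive produced. As a rough sketch only: SendToZenodoHDFS presumably streams each HDFS file to the deposition endpoint given by connectionUrl; the bucket-style PUT below is an assumption about the upload, not the class's confirmed behaviour:

import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ZenodoUploadSketch {

    // Streams one HDFS file to an assumed bucket-style PUT endpoint.
    public static void upload(FileSystem fs, String hdfsFile, String connectionUrl, String accessToken)
        throws Exception {
        String target = connectionUrl + "/" + new Path(hdfsFile).getName() + "?access_token=" + accessToken;
        HttpURLConnection conn = (HttpURLConnection) new URL(target).openConnection();
        conn.setRequestMethod("PUT");
        conn.setDoOutput(true);
        try (InputStream in = fs.open(new Path(hdfsFile)); OutputStream out = conn.getOutputStream()) {
            byte[] buffer = new byte[8192];
            int read;
            while ((read = in.read(buffer)) != -1) {
                out.write(buffer, 0, read);
            }
        }
        // 200/201 signal a successful upload.
        System.out.println("HTTP " + conn.getResponseCode());
    }
}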

@@ -438,10 +438,10 @@ public class DumpJobTest {
         Assertions.assertEquals("2017-01-01", instance.getPublicationdate());
         Assertions.assertEquals(null, instance.getArticleprocessingcharge());
         Assertions.assertEquals("peerReviewed", instance.getRefereed());
-        Indicator indicator = instance.getIndicators();
-
-        Assertions.assertFalse(Optional.ofNullable(indicator.getUsageCounts()).isPresent());
-        Assertions.assertTrue(Optional.ofNullable(indicator.getImpactMeasures()).isPresent());
+        // Indicator indicator = instance.getIndicators();
+        //
+        // Assertions.assertFalse(Optional.ofNullable(indicator.getUsageCounts()).isPresent());
+        // Assertions.assertTrue(Optional.ofNullable(indicator.getImpactMeasures()).isPresent());
     }
 
     @Test
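
Since getIndicators() no longer exists on Instance, the old assertions cannot compile, which is why the block is commented out rather than adapted. If the absence of indicators should still be verified, one hedged alternative is to assert against the serialized record instead, e.g.:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

public class NoIndicatorsTest {

    @Test
    void instanceJsonHasNoIndicators() throws Exception {
        // Hypothetical serialized instance; in DumpJobTest this would be the
        // record produced by the dump job.
        String json = "{\"refereed\":\"peerReviewed\",\"publicationdate\":\"2017-01-01\"}";
        JsonNode node = new ObjectMapper().readTree(json);
        Assertions.assertFalse(node.has("indicators"));
    }
}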