removed indicators from Instance

Miriam Baglioni 2022-12-28 21:50:48 +01:00
parent 71862838b0
commit 2d2b62386f
5 changed files with 36 additions and 25 deletions

Instance.java

@@ -26,8 +26,8 @@ import com.github.imifou.jsonschema.module.addon.annotation.JsonSchema;
  */
 public class Instance implements Serializable {
 
-    @JsonSchema(description = "Indicators computed for this instance, for example Bip!Finder ones")
-    private Indicator indicators;
+//    @JsonSchema(description = "Indicators computed for this instance, for example Bip!Finder ones")
+//    private Indicator indicators;
 
     private List<ResultPid> pid;
@@ -141,12 +141,12 @@ public class Instance implements Serializable {
         this.alternateIdentifier = alternateIdentifier;
     }
 
-    @JsonInclude(JsonInclude.Include.NON_NULL)
-    public Indicator getIndicators() {
-        return indicators;
-    }
-
-    public void setIndicators(Indicator indicators) {
-        this.indicators = indicators;
-    }
+//    @JsonInclude(JsonInclude.Include.NON_NULL)
+//    public Indicator getIndicators() {
+//        return indicators;
+//    }
+//
+//    public void setIndicators(Indicator indicators) {
+//        this.indicators = indicators;
+//    }
 }
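
For context, the Indicator bean being detached here exposes the two accessors exercised in DumpJobTest further down (getUsageCounts and getImpactMeasures). A minimal compilable sketch, with the member types reduced to placeholders rather than the real schema classes:

import java.io.Serializable;

// Placeholder member types, stand-ins for the real schema classes.
class UsageCounts implements Serializable {}
class ImpactMeasures implements Serializable {}

// Minimal sketch of the Indicator bean this commit detaches from Instance;
// only the accessors visible in this diff are reproduced.
public class Indicator implements Serializable {
    private UsageCounts usageCounts;
    private ImpactMeasures impactMeasures;

    public UsageCounts getUsageCounts() { return usageCounts; }
    public void setUsageCounts(UsageCounts usageCounts) { this.usageCounts = usageCounts; }
    public ImpactMeasures getImpactMeasures() { return impactMeasures; }
    public void setImpactMeasures(ImpactMeasures impactMeasures) { this.impactMeasures = impactMeasures; }
}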

ResultMapper.java

@@ -548,7 +548,7 @@ public class ResultMapper implements Serializable {
                     Constants.COAR_ACCESS_RIGHT_SCHEMA));
 
             Optional<List<eu.dnetlib.dhp.schema.oaf.Measure>> mes = Optional.ofNullable(i.getMeasures());
-            if (mes.isPresent()) {
+//            if (mes.isPresent()) {
 //                List<Indicator> indicators = new ArrayList<>();
 //                mes
 //                    .get()
@@ -563,8 +563,8 @@ public class ResultMapper implements Serializable {
 //                    .stream()
 //                    .map(u -> Measure.newInstance(u.getKey(), u.getValue()))
 //                    .collect(Collectors.toList()))));
-                instance.setIndicators(getIndicator(mes.get()));
-            }
+//                instance.setIndicators(getIndicator(mes.get()));
+//            }
 
             if (opAr.get().getOpenAccessRoute() != null) {
                 switch (opAr.get().getOpenAccessRoute()) {
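
The dead call above references a getIndicator helper that folded the graph-side Measure list into the dump-side Indicator. A hypothetical reconstruction, inferred from the commented-out mapping and shown as the member method it would be on ResultMapper (imports as in that class); the dispatch from measure id to Indicator field is an assumption, not visible in this diff:

private static Indicator getIndicator(List<eu.dnetlib.dhp.schema.oaf.Measure> measures) {
    Indicator indicator = new Indicator();
    for (eu.dnetlib.dhp.schema.oaf.Measure m : measures) {
        // each graph-side Measure carries an id plus a list of key/value units
        List<Measure> units = m.getUnit()
            .stream()
            .map(u -> Measure.newInstance(u.getKey(), u.getValue()))
            .collect(Collectors.toList());
        // which Indicator field the units land in (usage counts vs impact
        // measures) would be chosen on m.getId(); that wiring is elided here
    }
    return indicator;
}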

SparkSelectValidRelationContext.java

@@ -112,7 +112,7 @@ public class SparkSelectValidRelationContext implements Serializable {
                 Encoders.tuple(Encoders.STRING(), Encoders.bean(Relation.class)));
 
         Dataset<ResearchCommunity> allowedContext = Utils
-            .readPath(spark, inputPath + "/community_infrastructure", ResearchCommunity.class);
+            .readPath(spark, inputPath + "/communities_infrastructures", ResearchCommunity.class);
 
         Dataset<Tuple2<String, Relation>> relJoinSource = relationSource
             .joinWith(dumpedIds, relationSource.col("_1").equalTo(dumpedIds.col("value")))
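
The rename aligns the read with the directory name the dump actually writes, communities_infrastructures (plural); with the old singular name the job would only fail at action time with a missing-path error, which is presumably how the typo survived. For reference, Utils.readPath is, in rough shape, a JSON-lines read bound to the target bean; a sketch assuming the usual Spark-plus-Jackson pattern, not the verbatim utility:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;

public class UtilsSketch {
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    // Rough shape of Utils.readPath (a sketch, not the actual utility):
    // read each JSON line under inputPath and bind it to the given bean class.
    public static <R> Dataset<R> readPath(SparkSession spark, String inputPath, Class<R> clazz) {
        return spark
            .read()
            .textFile(inputPath)
            .map((MapFunction<String, R>) value -> OBJECT_MAPPER.readValue(value, clazz), Encoders.bean(clazz));
    }
}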

workflow.xml

@@ -65,7 +65,6 @@
         <name>organizationCommunityMap</name>
         <description>the organization community map</description>
     </property>
-
     <property>
         <name>hiveDbName</name>
         <description>the target hive database name</description>
@@ -134,8 +133,8 @@
         </configuration>
     </global>
 
-    <start to="only_upload"/>
+<!--    <start to="only_upload"/>-->
+    <start to="make_archive"/>
 
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
     </kill>
@@ -215,7 +214,7 @@
             </property>
         </configuration>
     </sub-workflow>
-    <ok to="End" />
+    <ok to="make_archive" />
     <error to="Kill" />
 </action>
@@ -309,12 +308,23 @@
         <error to="Kill" />
     </action>
 
+<!--    <action name="make_archive">-->
+<!--        <java>-->
+<!--            <main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>-->
+<!--            <arg>&#45;&#45;hdfsPath</arg><arg>${outputPath}</arg>-->
+<!--            <arg>&#45;&#45;nameNode</arg><arg>${nameNode}</arg>-->
+<!--            <arg>&#45;&#45;sourcePath</arg><arg>${workingDir}/tar</arg>-->
+<!--        </java>-->
+<!--        <ok to="should_upload"/>-->
+<!--        <error to="Kill"/>-->
+<!--    </action>-->
+
     <action name="make_archive">
         <java>
             <main-class>eu.dnetlib.dhp.oa.graph.dump.MakeTar</main-class>
-            <arg>--hdfsPath</arg><arg>${outputPath}</arg>
+            <arg>--hdfsPath</arg><arg>${outputPath}/tar</arg>
             <arg>--nameNode</arg><arg>${nameNode}</arg>
-            <arg>--sourcePath</arg><arg>${workingDir}/tar</arg>
+            <arg>--sourcePath</arg><arg>${outputPath}/dump</arg>
         </java>
         <ok to="should_upload"/>
         <error to="Kill"/>
@@ -330,7 +340,8 @@
     <action name="send_zenodo">
         <java>
             <main-class>eu.dnetlib.dhp.oa.graph.dump.SendToZenodoHDFS</main-class>
-            <arg>--hdfsPath</arg><arg>${outputPath}</arg>
+<!--            <arg>&#45;&#45;hdfsPath</arg><arg>${outputPath}</arg>-->
+            <arg>&#45;&#45;hdfsPath</arg><arg>${outputPath}/tar</arg>
             <arg>--nameNode</arg><arg>${nameNode}</arg>
             <arg>--accessToken</arg><arg>${accessToken}</arg>
             <arg>--connectionUrl</arg><arg>${connectionUrl}</arg>
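
Taken together, the workflow edits retarget the archiving step: the entry point moves from only_upload to make_archive, the dump sub-workflow's ok transition chains into make_archive instead of End, MakeTar now reads the dump from ${outputPath}/dump and writes the archives under ${outputPath}/tar, and send_zenodo uploads from that same ${outputPath}/tar. A sketch of what the retargeted step amounts to, not the verbatim MakeTar implementation:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Sketch only: walk the dump written under --sourcePath and produce one
// archive per top-level directory under --hdfsPath.
public class MakeTarSketch {
    public static void main(String[] args) throws Exception {
        String hdfsPath = args[0];   // e.g. <outputPath>/tar
        String nameNode = args[1];   // e.g. hdfs://nameservice1
        String sourcePath = args[2]; // e.g. <outputPath>/dump

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", nameNode);
        FileSystem fs = FileSystem.get(conf);

        fs.mkdirs(new Path(hdfsPath));
        for (FileStatus entry : fs.listStatus(new Path(sourcePath))) {
            // per-entry tar building is elided in this sketch
            System.out.println("would archive " + entry.getPath() + " under " + hdfsPath);
        }
    }
}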

DumpJobTest.java

@@ -438,10 +438,10 @@ public class DumpJobTest {
         Assertions.assertEquals("2017-01-01", instance.getPublicationdate());
         Assertions.assertEquals(null, instance.getArticleprocessingcharge());
         Assertions.assertEquals("peerReviewed", instance.getRefereed());
-        Indicator indicator = instance.getIndicators();
-
-        Assertions.assertFalse(Optional.ofNullable(indicator.getUsageCounts()).isPresent());
-        Assertions.assertTrue(Optional.ofNullable(indicator.getImpactMeasures()).isPresent());
+//        Indicator indicator = instance.getIndicators();
+//
+//        Assertions.assertFalse(Optional.ofNullable(indicator.getUsageCounts()).isPresent());
+//        Assertions.assertTrue(Optional.ofNullable(indicator.getImpactMeasures()).isPresent());
     }
 
     @Test
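
With the accessors gone from Instance, the commented assertions can no longer compile. If coverage of the removal itself is wanted, a serialization-level check is one option; a sketch that would slot into the test method above, hypothetical and not part of this commit, assuming Jackson is on the test classpath:

// The serialized Instance should no longer carry an "indicators" field.
String json = new com.fasterxml.jackson.databind.ObjectMapper().writeValueAsString(instance);
Assertions.assertFalse(json.contains("\"indicators\""));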