forked from D-Net/dnet-hadoop
minor changes and refactoring
parent 59615da65e
commit 618d2de2da
@@ -428,7 +428,7 @@ public class ResultMapper implements Serializable {
 		if (oPca.isPresent() && oPcc.isPresent()) {
 			Field<String> pca = oPca.get();
 			Field<String> pcc = oPcc.get();
-			if(!pca.getValue().trim().equals("") && !pcc.getValue().trim().equals("")){
+			if (!pca.getValue().trim().equals("") && !pcc.getValue().trim().equals("")) {
 				APC apc = new APC();
 				apc.setCurrency(oPcc.get().getValue());
 				apc.setAmount(oPca.get().getValue());
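The hunk above only reformats the guard, but the logic it touches is worth spelling out: an APC is materialized only when both the processing-charge amount and currency are present and non-blank. A minimal self-contained sketch of that pattern, where APC is a stand-in POJO rather than the real dump-schema class:

    import java.util.Optional;

    // Sketch of the guard in the hunk above: build an APC only when both
    // optional fields are present and non-blank. APC is a stand-in here;
    // in ResultMapper it comes from the dump schema.
    class ApcSketch {

        static final class APC {
            String amount;
            String currency;
        }

        static Optional<APC> toApc(Optional<String> oPca, Optional<String> oPcc) {
            if (oPca.isPresent() && oPcc.isPresent()
                && !oPca.get().trim().equals("") && !oPcc.get().trim().equals("")) {
                APC apc = new APC();
                apc.amount = oPca.get();
                apc.currency = oPcc.get();
                return Optional.of(apc);
            }
            return Optional.empty();
        }
    }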
@@ -70,10 +70,10 @@ public class CreateContextRelation implements Serializable {
 		cce.execute(Process::getRelation, CONTEX_RELATION_DATASOURCE, ModelSupport.getIdPrefix(Datasource.class));
 
 		log.info("Creating relations for projects... ");
-		// cce
-		// 	.execute(
-		// 		Process::getRelation, CONTEX_RELATION_PROJECT,
-		// 		ModelSupport.getIdPrefix(eu.dnetlib.dhp.schema.oaf.Project.class));
+		cce
+			.execute(
+				Process::getRelation, CONTEX_RELATION_PROJECT,
+				ModelSupport.getIdPrefix(eu.dnetlib.dhp.schema.oaf.Project.class));
 
 		cce.close();
 
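This hunk re-enables the project pass: the same `execute` runs once per target entity type, driven by a method reference and the id prefix of that entity. A rough sketch of the calling shape only; the functional-interface signature below is an assumption, not the real one in CreateContextRelation:

    import java.util.List;
    import java.util.function.Function;

    // Stand-ins mirroring the call pattern above: one execute(), parameterized
    // by an extractor method reference plus the id prefix of the entities the
    // produced relations point at. Not the real dnet-hadoop signatures.
    class ContextRelationSketch<C, R> {

        void execute(Function<C, List<R>> extractor, String category, String idPrefix) {
            // for each context entry: serialize extractor.apply(context),
            // resolving targets against ids that start with idPrefix
        }
    }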
@@ -461,7 +461,7 @@ public class DumpGraphEntities implements Serializable {
 	}
 
 	private static Organization mapOrganization(eu.dnetlib.dhp.schema.oaf.Organization org) {
-		if(org.getDataInfo().getDeletedbyinference())
+		if (org.getDataInfo().getDeletedbyinference())
 			return null;
 		Organization organization = new Organization();
 
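The `mapOrganization` fix is cosmetic, but the early return it touches encodes a convention used across the dump: entities flagged `deletedbyinference` map to null and are filtered out downstream. A toy version of that flow, with stand-in types in place of the OAF and dump classes:

    import java.util.Objects;
    import java.util.stream.Stream;

    // Toy version of the null-means-skip convention above; Org stands in for
    // eu.dnetlib.dhp.schema.oaf.Organization.
    class DeletedByInferenceSketch {

        static final class Org {
            boolean deletedbyinference;
            String name;
        }

        static String map(Org org) {
            if (org.deletedbyinference)
                return null; // same early exit as in the hunk
            return org.name;
        }

        public static void main(String[] args) {
            Org kept = new Org();
            kept.name = "kept";
            Org gone = new Org();
            gone.deletedbyinference = true;
            Stream.of(kept, gone)
                .map(DeletedByInferenceSketch::map)
                .filter(Objects::nonNull)
                .forEach(System.out::println); // prints only "kept"
        }
    }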
@@ -147,7 +147,7 @@ public class Extractor implements Serializable {
 				.map(
 					paction -> Provenance
 						.newInstance(
-							paction.getClassid(),
+							paction.getClassname(),
 							dinfo.getTrust()))
 				.orElse(
 					Provenance
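In the Extractor hunk the provenance name now comes from `getClassname()` rather than `getClassid()` (as reconstructed above from the 7/7 line counts). The enclosing `Optional.map(...).orElse(...)` chain is the usual build-or-fallback idiom; a compact sketch, with `Provenance` reduced to a two-field stand-in:

    import java.util.Optional;

    // Sketch of the map/orElse chain above: build Provenance from the
    // provenance action when present, otherwise fall back to a default.
    class ProvenanceSketch {

        static final class Provenance {
            final String provenance;
            final String trust;

            Provenance(String provenance, String trust) {
                this.provenance = provenance;
                this.trust = trust;
            }

            static Provenance newInstance(String p, String t) {
                return new Provenance(p, t);
            }
        }

        static Provenance from(Optional<String> actionName, String trust, String fallback) {
            return actionName
                .map(name -> Provenance.newInstance(name, trust))
                .orElse(Provenance.newInstance(fallback, trust));
        }
    }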
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.oa.graph.dump.complete;
 
 import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
@@ -25,7 +26,6 @@ import eu.dnetlib.dhp.schema.oaf.*;
  * with this view for both the source and the target
  */
 
-
 public class SparkSelectValidRelationsJob implements Serializable {
 
 	private static final Logger log = LoggerFactory.getLogger(SparkSelectValidRelationsJob.class);
@@ -114,7 +114,7 @@ public class SparkSelectValidRelationsJob implements Serializable {
 				"UNION ALL " +
 				"SELECT id " +
 				"FROM datasource " +
-				"WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " )
+				"WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false ")
 			.createOrReplaceTempView("identifiers");
 
 		spark
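The SQL being trimmed here builds an `identifiers` temp view as a UNION ALL of the ids of all valid (not deleted-by-inference, not invisible) entities; relations are then kept only when both endpoints appear in that view, matching the class comment about "both the source and the target". A sketch of the pattern with just two entity views; table names beyond `datasource` are assumptions:

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    // Sketch of the valid-relations selection above: collect surviving ids
    // into one "identifiers" view, then require both relation endpoints to
    // resolve against it. Assumes publication/datasource/relation views are
    // already registered on the session.
    class ValidRelationsSketch {

        static Dataset<Row> selectValid(SparkSession spark) {
            spark
                .sql(
                    "SELECT id FROM publication " +
                        "WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false " +
                        "UNION ALL " +
                        "SELECT id FROM datasource " +
                        "WHERE datainfo.deletedbyinference = false AND datainfo.invisible = false")
                .createOrReplaceTempView("identifiers");

            return spark
                .sql(
                    "SELECT r.* FROM relation r " +
                        "JOIN identifiers s ON r.source = s.id " +
                        "JOIN identifiers t ON r.target = t.id");
        }
    }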
@@ -436,13 +436,15 @@ public class DumpJobTest {
 			.createDataset(tmp.rdd(), Encoders.bean(GraphResult.class));
 
 		Assertions.assertEquals(23, verificationDataset.count());
-		//verificationDataset.show(false);
+		// verificationDataset.show(false);
 
 		Assertions.assertEquals(23, verificationDataset.filter("type = 'publication'").count());
 
 		verificationDataset.createOrReplaceTempView("check");
 
-		org.apache.spark.sql.Dataset<Row> temp = spark.sql("select id " +
+		org.apache.spark.sql.Dataset<Row> temp = spark
+			.sql(
+				"select id " +
 				"from check " +
 				"lateral view explode (instance) i as inst " +
 				"where inst.articleprocessingcharge is not null");
@@ -453,8 +455,6 @@ public class DumpJobTest {
 
 		Assertions.assertTrue(temp.filter("id = '50|dedup_wf_001::01e6a28565ca01376b7548e530c6f6e8'").count() == 1);
 
-
-
 		// verificationDataset.filter("bestAccessright.code = 'c_abf2'").count() == verificationDataset
 		// 	.filter("bestAccessright.code = 'c_abf2' and bestAccessright.label = 'OPEN'")
 		// 	.count()
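The reshaped test query relies on Spark SQL's `lateral view explode` to unnest the `instance` array before filtering on `articleprocessingcharge`. A self-contained toy demonstration of just that idiom, on an invented schema with a local session:

    import java.util.Arrays;
    import java.util.List;

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Encoders;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;

    // Toy demonstration of the lateral-view-explode idiom used in the test:
    // unnest an array column, then filter on the exploded value.
    public class LateralViewSketch {

        public static class Rec {
            private String id;
            private List<String> instance;

            public String getId() { return id; }
            public void setId(String id) { this.id = id; }
            public List<String> getInstance() { return instance; }
            public void setInstance(List<String> instance) { this.instance = instance; }
        }

        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder().master("local[*]").appName("sketch").getOrCreate();
            Rec r = new Rec();
            r.setId("50|example");
            r.setInstance(Arrays.asList("apc", null));
            Dataset<Rec> ds = spark.createDataset(Arrays.asList(r), Encoders.bean(Rec.class));
            ds.createOrReplaceTempView("check");
            Dataset<Row> temp = spark
                .sql("select id from check lateral view explode (instance) i as inst where inst is not null");
            temp.show(false); // one row: the null element is filtered out
            spark.stop();
        }
    }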
@@ -97,7 +97,7 @@ public class CreateEntityTest {
 		Assertions.assertEquals(12, riList.size());
 
 		riList.stream().forEach(c -> {
-			switch (c.getOriginalId()) {
+			switch (c.getAcronym()) {
 				case "mes":
 					Assertions
 						.assertTrue(c.getType().equals(eu.dnetlib.dhp.oa.graph.dump.Constants.RESEARCH_COMMUNITY));
@@ -115,9 +115,9 @@ public class CreateEntityTest {
 							String
 								.format(
 									"%s|%s::%s", Constants.CONTEXT_ID, Constants.CONTEXT_NS_PREFIX,
-									DHPUtils.md5(c.getOriginalId()))));
+									DHPUtils.md5(c.getAcronym()))));
 					Assertions.assertTrue(c.getZenodo_community().equals("https://zenodo.org/communities/oac_mes"));
-					Assertions.assertTrue("mes".equals(c.getOriginalId()));
+					Assertions.assertTrue("mes".equals(c.getAcronym()));
 					break;
 				case "clarin":
 					Assertions
@@ -130,9 +130,9 @@ public class CreateEntityTest {
 							String
 								.format(
 									"%s|%s::%s", Constants.CONTEXT_ID, Constants.CONTEXT_NS_PREFIX,
-									DHPUtils.md5(c.getOriginalId()))));
+									DHPUtils.md5(c.getAcronym()))));
 					Assertions.assertTrue(c.getZenodo_community().equals("https://zenodo.org/communities/oac_clarin"));
-					Assertions.assertTrue("clarin".equals(c.getOriginalId()));
+					Assertions.assertTrue("clarin".equals(c.getAcronym()));
 					break;
 			}
 			// TODO add check for all the others Entities
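Both CreateEntityTest hunks chase the same rename: the switch key, the expected identifier, and the final assertion are now derived from the acronym instead of the original id. The expected id has the shape `CONTEXT_ID|CONTEXT_NS_PREFIX::md5(acronym)`; a sketch with `DHPUtils.md5` replaced by the standard library, assuming (as the format string suggests) a hex-encoded MD5 digest:

    import java.math.BigInteger;
    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    // Sketch of the id the assertions expect: fixed context prefix plus the
    // md5 of the community acronym. java.security stands in for DHPUtils.md5.
    class ContextIdSketch {

        static String contextId(String contextId, String nsPrefix, String acronym)
            throws NoSuchAlgorithmException {
            byte[] digest = MessageDigest
                .getInstance("MD5")
                .digest(acronym.getBytes(StandardCharsets.UTF_8));
            String md5 = String.format("%032x", new BigInteger(1, digest));
            return String.format("%s|%s::%s", contextId, nsPrefix, md5);
        }
    }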
@@ -1,3 +1,4 @@
+
 package eu.dnetlib.dhp.oa.graph.dump.complete;
 
 import java.io.IOException;