forked from D-Net/dnet-hadoop
enabled test
This commit is contained in:
parent 2d67476417
commit f31c2e9461
@@ -27,7 +27,6 @@ import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;
 import eu.dnetlib.dhp.schema.dump.oaf.graph.Relation;
 import eu.dnetlib.enabling.is.lookup.rmi.ISLookUpService;
 
-@Disabled
 public class ExtractRelationFromEntityTest {
 
 	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@@ -39,34 +38,6 @@ public class ExtractRelationFromEntityTest {
 	private static final Logger log = LoggerFactory
 		.getLogger(ExtractRelationFromEntityTest.class);
 
-	private static CommunityMap map = new CommunityMap();
-
-	static {
-		map.put("egi", "EGI Federation");
-		map.put("fet-fp7", "FET FP7");
-		map.put("fet-h2020", "FET H2020");
-		map.put("clarin", "CLARIN");
-		map.put("fam", "Fisheries and Aquaculture Management");
-		map.put("ni", "Neuroinformatics");
-		map.put("mes", "European Marine Scinece");
-		map.put("instruct", "Instruct-Eric");
-		map.put("rda", "Research Data Alliance");
-		map.put("elixir-gr", "ELIXIR GR");
-		map.put("aginfra", "Agricultural and Food Sciences");
-		map.put("dariah", "DARIAH EU");
-		map.put("risis", "RISI");
-		map.put("ee", "SDSN - Greece");
-		map.put("oa-pg", "EC Post-Grant Open Access Pilot");
-		map.put("beopen", "Transport Research");
-		map.put("euromarine", "Euromarine");
-		map.put("ifremer", "Ifremer");
-		map.put("dh-ch", "Digital Humanities and Cultural Heritage");
-		map.put("science-innovation-policy", "Science and Innovation Policy Studies");
-		map.put("covid-19", "COVID-19");
-		map.put("enrmaps", "Energy Research");
-		map.put("epos", "EPOS");
-
-	}
-
 	@BeforeAll
 	public static void beforeAll() throws IOException {
@@ -97,43 +68,43 @@ public class ExtractRelationFromEntityTest {
 		spark.stop();
 	}
 
-	// @Test
-	// public void test1() {
-	//
-	// final String sourcePath = getClass()
-	// .getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/singelRecord_pub.json")
-	// .getPath();
-	//
-	// final String communityMapPath = getClass()
-	// .getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
-	// .getPath();
-	//
-	// Extractor ex = new Extractor();
-	// ex
-	// .run(
-	// false, sourcePath, workingDir.toString() + "/relation",
-	// // eu.dnetlib.dhp.schema.oaf.Publication.class, communityMapPath);
-	// eu.dnetlib.dhp.schema.oaf.Publication.class, map);
-	//
-	// final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
-	//
-	// JavaRDD<Relation> tmp = sc
-	// .textFile(workingDir.toString() + "/relation")
-	// .map(item -> OBJECT_MAPPER.readValue(item, Relation.class));
-	//
-	// org.apache.spark.sql.Dataset<Relation> verificationDataset = spark
-	// .createDataset(tmp.rdd(), Encoders.bean(Relation.class));
-	//
-	// Assertions
-	// .assertEquals(
-	// 9,
-	// verificationDataset.filter("source.id = '50|dedup_wf_001::15270b996fa8fd2fb5723daeab3685c3'").count());
-	//
-	// Assertions
-	// .assertEquals(
-	// 9,
-	// verificationDataset.filter("source.id = '50|dedup_wf_001::15270b996fa8fd2fb5723daxab3685c3'").count());
-	//
-	// }
+	@Test
+	public void test1() {
+
+		final String sourcePath = getClass()
+			.getResource("/eu/dnetlib/dhp/oa/graph/dump/resultDump/singelRecord_pub.json")
+			.getPath();
+
+		final String communityMapPath = getClass()
+			.getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
+			.getPath();
+
+		Extractor ex = new Extractor();
+		ex
+			.run(
+				false, sourcePath, workingDir.toString() + "/relation",
+				// eu.dnetlib.dhp.schema.oaf.Publication.class, communityMapPath);
+				eu.dnetlib.dhp.schema.oaf.Publication.class, communityMapPath);
+
+		final JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());
+
+		JavaRDD<Relation> tmp = sc
+			.textFile(workingDir.toString() + "/relation")
+			.map(item -> OBJECT_MAPPER.readValue(item, Relation.class));
+
+		org.apache.spark.sql.Dataset<Relation> verificationDataset = spark
+			.createDataset(tmp.rdd(), Encoders.bean(Relation.class));
+
+		Assertions
+			.assertEquals(
+				9,
+				verificationDataset.filter("source.id = '50|dedup_wf_001::15270b996fa8fd2fb5723daeab3685c3'").count());
+
+		Assertions
+			.assertEquals(
+				9,
+				verificationDataset.filter("source.id = '50|dedup_wf_001::15270b996fa8fd2fb5723daxab3685c3'").count());
+
+	}
 
 }
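For context, the substantive change inside the re-enabled test is that Extractor.run now receives the communityMapPath resource path instead of the hard-coded static map removed in the second hunk. Below is a minimal sketch of how such a path could be read back into a CommunityMap with Jackson; the loadCommunityMap helper and the standalone class are hypothetical, and the sketch assumes CommunityMap behaves like a Map<String, String> (consistent with the put(String, String) calls in the removed block) with a default constructor.

import java.io.File;
import java.io.IOException;

import com.fasterxml.jackson.databind.ObjectMapper;

import eu.dnetlib.dhp.oa.graph.dump.community.CommunityMap;

public class CommunityMapExample {

	// Hypothetical helper: reads a flat JSON object of community id -> label,
	// e.g. {"egi": "EGI Federation", "clarin": "CLARIN"}, into a CommunityMap.
	static CommunityMap loadCommunityMap(String communityMapPath) throws IOException {
		final ObjectMapper mapper = new ObjectMapper();
		return mapper.readValue(new File(communityMapPath), CommunityMap.class);
	}

	public static void main(String[] args) throws IOException {
		// Assumption: the same communitymap.json resource used by the test is on the classpath.
		final String communityMapPath = CommunityMapExample.class
			.getResource("/eu/dnetlib/dhp/oa/graph/dump/communityMapPath/communitymap.json")
			.getPath();

		final CommunityMap map = loadCommunityMap(communityMapPath);
		map.forEach((id, label) -> System.out.println(id + " -> " + label));
	}
}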