forked from D-Net/dnet-hadoop
merging with branch beta
This commit is contained in:
commit
6d3c4c4abe
|
@ -59,7 +59,12 @@ object SparkConvertRDDtoDataset {
|
||||||
log.info("Converting Relation")
|
log.info("Converting Relation")
|
||||||
|
|
||||||
|
|
||||||
val rddRelation =spark.sparkContext.textFile(s"$sourcePath/relation").map(s => mapper.readValue(s, classOf[Relation])).filter(r=> r.getSource.startsWith("50") && r.getTarget.startsWith("50"))
|
val relationSemanticFilter = List("cites", "iscitedby","merges", "ismergedin")
|
||||||
|
|
||||||
|
val rddRelation =spark.sparkContext.textFile(s"$sourcePath/relation")
|
||||||
|
.map(s => mapper.readValue(s, classOf[Relation]))
|
||||||
|
.filter(r=> r.getSource.startsWith("50") && r.getTarget.startsWith("50"))
|
||||||
|
.filter(r => !relationSemanticFilter.exists(k => k.equalsIgnoreCase(r.getRelClass)))
|
||||||
spark.createDataset(rddRelation).as[Relation].write.mode(SaveMode.Overwrite).save(s"$relPath")
|
spark.createDataset(rddRelation).as[Relation].write.mode(SaveMode.Overwrite).save(s"$relPath")
|
||||||
|
|
||||||
|
|
||||||
|
|
Loading…
Reference in New Issue