forked from D-Net/dnet-hadoop

commit 0d1f37302f
Merge branch 'stable_ids' of code-repo.d4science.org:D-Net/dnet-hadoop into stable_id_scholexplorer
@@ -15,6 +15,12 @@ import org.slf4j.{Logger, LoggerFactory}
 object SparkConvertORCIDToOAF {
   val logger: Logger = LoggerFactory.getLogger(SparkConvertORCIDToOAF.getClass)
 
+  def fixORCIDItem(item :ORCIDItem):ORCIDItem = {
+    new ORCIDItem(item.doi, item.authors.groupBy(_.oid).map(_._2.head).toList)
+  }
+
+
+
   def run(spark:SparkSession,sourcePath:String,workingPath:String, targetPath:String):Unit = {
     import spark.implicits._
     implicit val mapEncoderPubs: Encoder[Publication] = Encoders.kryo[Publication]
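
The fixORCIDItem helper added above deduplicates an item's author list by ORCID iD: groupBy(_.oid) buckets the authors that share an iD, and map(_._2.head) keeps one record per bucket. A minimal self-contained sketch of that technique, with hypothetical OrcidAuthor/OrcidItem case classes standing in for the project's ORCID types (which this diff does not show):

// Sketch of the dedup-by-oid technique used in fixORCIDItem.
// OrcidAuthor and OrcidItem are hypothetical stand-ins for the
// project's ORCID types.
case class OrcidAuthor(oid: String, name: String)
case class OrcidItem(doi: String, authors: List[OrcidAuthor])

def fixItem(item: OrcidItem): OrcidItem =
  // group authors by ORCID iD, keep the first record of each group
  OrcidItem(item.doi, item.authors.groupBy(_.oid).map(_._2.head).toList)

val item = OrcidItem("10.1000/x", List(
  OrcidAuthor("0000-0001", "A. Smith"),
  OrcidAuthor("0000-0001", "Alice Smith"), // same iD, dropped
  OrcidAuthor("0000-0002", "B. Jones")))

// one entry per oid survives; the order is not guaranteed, since
// groupBy returns an unordered Map
assert(fixItem(item).authors.map(_.oid).toSet == Set("0000-0001", "0000-0002"))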
@@ -34,10 +40,11 @@ object SparkConvertORCIDToOAF {
     works.joinWith(authors, authors("oid").equalTo(works("oid")))
       .map(i =>{
         val doi = i._1.doi
-        val author = i._2
+        var author = i._2
         (doi, author)
       }).groupBy(col("_1").alias("doi"))
-      .agg(collect_list(col("_2")).alias("authors"))
+      .agg(collect_list(col("_2")).alias("authors")).as[ORCIDItem]
+      .map(s => fixORCIDItem(s))
       .write.mode(SaveMode.Overwrite).save(s"$workingPath/orcidworksWithAuthor")
 
     val dataset: Dataset[ORCIDItem] =spark.read.load(s"$workingPath/orcidworksWithAuthor").as[ORCIDItem]
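
The chain above joins works with authors on the ORCID iD, reshapes each pair into (doi, author), then groups per DOI and collects the authors into a list; the new .as[ORCIDItem] turns that untyped aggregation result back into a typed Dataset so the per-record fixORCIDItem call can run before the write. A runnable sketch of the grouping step, assuming hypothetical case classes in place of ORCIDItem (defined at top level so Spark can derive encoders):

// Sketch of the group-authors-per-DOI step; OrcidAuthor/OrcidItem are
// hypothetical stand-ins for the project's types.
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, collect_list}

case class OrcidAuthor(oid: String, name: String)
case class OrcidItem(doi: String, authors: Seq[OrcidAuthor])

object GroupSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("sketch").getOrCreate()
    import spark.implicits._

    // (doi, author) pairs, as produced by the joinWith + map above
    val pairs = Seq(
      ("10.1000/x", OrcidAuthor("0000-0001", "A. Smith")),
      ("10.1000/x", OrcidAuthor("0000-0002", "B. Jones"))).toDS()

    // group per DOI and collect the authors; .as[OrcidItem] maps the
    // untyped aggregation result back to a typed Dataset, which is what
    // allows a subsequent .map over case-class records
    val items = pairs.groupBy(col("_1").alias("doi"))
      .agg(collect_list(col("_2")).alias("authors"))
      .as[OrcidItem]

    items.show(truncate = false)
    spark.stop()
  }
}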
@@ -108,7 +108,7 @@ public class SparkOrcidToResultFromSemRelJob {
 			return value -> {
 				R ret = value._1();
 				Optional<ResultOrcidList> rol = Optional.ofNullable(value._2());
-				if (rol.isPresent()) {
+				if (rol.isPresent() && Optional.ofNullable(ret.getAuthor()).isPresent()) {
 					List<Author> toenrich_author = ret.getAuthor();
 					List<AutoritativeAuthor> autoritativeAuthors = rol.get().getAuthorList();
 					for (Author author : toenrich_author) {
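
The strengthened condition in the Java hunk above guards against results whose author list is null: without the extra check, ret.getAuthor() would return null and the for loop over toenrich_author would throw a NullPointerException. The same guard, sketched here in Scala with Option rather than Java's Optional (all names are hypothetical stand-ins, not the project's API):

// Hypothetical stand-ins for the job's result types.
case class AutoritativeAuthor(oid: String, fullname: String)
case class ResultOrcidList(authorList: List[AutoritativeAuthor])
case class Result(author: java.util.List[String]) // may be null, as in the Java code

def enrich(ret: Result, rol: Option[ResultOrcidList]): Result =
  // proceed only when both the ORCID list and the author list are present;
  // Option(ret.author) is None when the Java-style field is null
  (rol, Option(ret.author)) match {
    case (Some(orcids), Some(authors)) =>
      // ... match authors against orcids.authorList and attach ORCID pids ...
      ret
    case _ =>
      ret // nothing to enrich; return the record unchanged
  }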