forked from D-Net/dnet-hadoop

commit 45fc5e12be

Merge commit 'cb7c07c54e59675e8dffe42b7f2a13f16c956068' into beta2master_sept_2022
@@ -5,13 +5,71 @@ import java.io.BufferedInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.Serializable;
+import java.util.Optional;
 
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 
 public class MakeTarArchive implements Serializable {
 
+    private static final Logger log = LoggerFactory.getLogger(MakeTarArchive.class);
+
+    public static void main(String[] args) throws Exception {
+        String jsonConfiguration = IOUtils
+            .toString(
+                MakeTarArchive.class
+                    .getResourceAsStream(
+                        "/eu/dnetlib/dhp/common/input_maketar_parameters.json"));
+
+        final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);
+        parser.parseArgument(args);
+
+        final String outputPath = parser.get("hdfsPath");
+        log.info("hdfsPath: {}", outputPath);
+
+        final String hdfsNameNode = parser.get("nameNode");
+        log.info("nameNode: {}", hdfsNameNode);
+
+        final String inputPath = parser.get("sourcePath");
+        log.info("input path : {}", inputPath);
+
+        final int gBperSplit = Optional
+            .ofNullable(parser.get("splitSize"))
+            .map(Integer::valueOf)
+            .orElse(10);
+
+        Configuration conf = new Configuration();
+        conf.set("fs.defaultFS", hdfsNameNode);
+
+        FileSystem fileSystem = FileSystem.get(conf);
+
+        makeTArArchive(fileSystem, inputPath, outputPath, gBperSplit);
+
+    }
+
+    public static void makeTArArchive(FileSystem fileSystem, String inputPath, String outputPath, int gBperSplit)
+        throws IOException {
+
+        RemoteIterator<LocatedFileStatus> dirIterator = fileSystem.listLocatedStatus(new Path(inputPath));
+
+        while (dirIterator.hasNext()) {
+            LocatedFileStatus fileStatus = dirIterator.next();
+
+            Path p = fileStatus.getPath();
+            String pathString = p.toString();
+            String entity = pathString.substring(pathString.lastIndexOf("/") + 1);
+
+            MakeTarArchive.tarMaxSize(fileSystem, pathString, outputPath + "/" + entity, entity, gBperSplit);
+        }
+    }
+
     private static TarArchiveOutputStream getTar(FileSystem fileSystem, String outputPath) throws IOException {
         Path hdfsWritePath = new Path(outputPath);
         if (fileSystem.exists(hdfsWritePath)) {
@@ -21,7 +79,7 @@ public class MakeTarArchive implements Serializable {
         return new TarArchiveOutputStream(fileSystem.create(hdfsWritePath).getWrappedStream());
     }
 
-    private static void write(FileSystem fileSystem, String inputPath, String outputPath, String dir_name)
+    private static void write(FileSystem fileSystem, String inputPath, String outputPath, String dirName)
         throws IOException {
 
         Path hdfsWritePath = new Path(outputPath);
@@ -37,7 +95,7 @@ public class MakeTarArchive implements Serializable {
             new Path(inputPath), true);
 
         while (iterator.hasNext()) {
-            writeCurrentFile(fileSystem, dir_name, iterator, ar, 0);
+            writeCurrentFile(fileSystem, dirName, iterator, ar, 0);
         }
 
     }
@@ -59,32 +117,30 @@ public class MakeTarArchive implements Serializable {
             new Path(inputPath), true);
         boolean next = fileStatusListIterator.hasNext();
         while (next) {
-            TarArchiveOutputStream ar = getTar(fileSystem, outputPath + "_" + (partNum + 1) + ".tar");
+            try (TarArchiveOutputStream ar = getTar(fileSystem, outputPath + "_" + (partNum + 1) + ".tar")) {
 
-            long current_size = 0;
-            while (next && current_size < bytesPerSplit) {
-                current_size = writeCurrentFile(fileSystem, dir_name, fileStatusListIterator, ar, current_size);
-                next = fileStatusListIterator.hasNext();
+                long currentSize = 0;
+                while (next && currentSize < bytesPerSplit) {
+                    currentSize = writeCurrentFile(fileSystem, dir_name, fileStatusListIterator, ar, currentSize);
+                    next = fileStatusListIterator.hasNext();
 
-            }
+                }
 
-            partNum += 1;
-            ar.close();
+                partNum += 1;
+            }
         }
 
     }
 
-    private static long writeCurrentFile(FileSystem fileSystem, String dir_name,
+    private static long writeCurrentFile(FileSystem fileSystem, String dirName,
         RemoteIterator<LocatedFileStatus> fileStatusListIterator,
-        TarArchiveOutputStream ar, long current_size) throws IOException {
+        TarArchiveOutputStream ar, long currentSize) throws IOException {
         LocatedFileStatus fileStatus = fileStatusListIterator.next();
 
         Path p = fileStatus.getPath();
-        String p_string = p.toString();
-        if (!p_string.endsWith("_SUCCESS")) {
-            String name = p_string.substring(p_string.lastIndexOf("/") + 1);
+        String pString = p.toString();
+        if (!pString.endsWith("_SUCCESS")) {
+            String name = pString.substring(pString.lastIndexOf("/") + 1);
             if (name.startsWith("part-") & name.length() > 10) {
                 String tmp = name.substring(0, 10);
                 if (name.contains(".")) {
@@ -92,9 +148,9 @@ public class MakeTarArchive implements Serializable {
                 }
                 name = tmp;
             }
-            TarArchiveEntry entry = new TarArchiveEntry(dir_name + "/" + name);
+            TarArchiveEntry entry = new TarArchiveEntry(dirName + "/" + name);
             entry.setSize(fileStatus.getLen());
-            current_size += fileStatus.getLen();
+            currentSize += fileStatus.getLen();
             ar.putArchiveEntry(entry);
 
             InputStream is = fileSystem.open(fileStatus.getPath());
@@ -110,7 +166,7 @@ public class MakeTarArchive implements Serializable {
             ar.closeArchiveEntry();
 
         }
-        return current_size;
+        return currentSize;
     }
 
 }
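Note on the new class above: a minimal sketch of calling the added makeTArArchive entry point directly (for example from a test) rather than through the Oozie java action added further down. The local-filesystem configuration and the /tmp paths are illustrative assumptions; only the makeTArArchive(FileSystem, String, String, int) signature and the fs.defaultFS setting come from the code above.

    import org.apache.hadoop.conf.Configuration
    import org.apache.hadoop.fs.FileSystem

    import eu.dnetlib.dhp.common.MakeTarArchive

    object MakeTarArchiveLocalSketch {
      def main(args: Array[String]): Unit = {
        // Illustrative: point the Hadoop FileSystem at the local disk instead of HDFS.
        val conf = new Configuration()
        conf.set("fs.defaultFS", "file:///")
        val fs = FileSystem.get(conf)

        // Each sub-directory of the source path is archived into <entity>_N.tar parts
        // no larger than the split size; the workflow action relies on the 10 GB default.
        MakeTarArchive.makeTArArchive(fs, "file:///tmp/scholix/json", "file:///tmp/scholix/tar", 1)
      }
    }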
@@ -0,0 +1,30 @@
+[
+
+	{
+		"paramName":"s",
+		"paramLongName":"sourcePath",
+		"paramDescription": "the path of the sequencial file to read",
+		"paramRequired": true
+	},
+	{
+		"paramName": "hdp",
+		"paramLongName": "hdfsPath",
+		"paramDescription": "the path used to store the output archive",
+		"paramRequired": true
+	},
+	{
+		"paramName":"nn",
+		"paramLongName":"nameNode",
+		"paramDescription": "the name node",
+		"paramRequired": true
+	},
+	{
+		"paramName":"ss",
+		"paramLongName":"splitSize",
+		"paramDescription": "the maximum size of the archive",
+		"paramRequired": false
+	}
+]
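A hedged sketch of how these parameters map onto the command line of the MakeTarArchive main method shown above: the long parameter names come from this JSON file (and match the arguments the workflow passes below), while the concrete values here are placeholders.

    import eu.dnetlib.dhp.common.MakeTarArchive

    object MakeTarArchiveCliSketch {
      def main(cliArgs: Array[String]): Unit = {
        // Placeholder values; only the --long-name keys are taken from the JSON above.
        val args = Array(
          "--nameNode", "hdfs://nameservice1",
          "--sourcePath", "/user/dnet/scholix/json",
          "--hdfsPath", "/user/dnet/scholix/tar",
          "--splitSize", "5" // optional; MakeTarArchive defaults to 10 GB per part
        )
        MakeTarArchive.main(args)
      }
    }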
@@ -2,5 +2,6 @@
 {"paramName":"mt", "paramLongName":"master", "paramDescription": "should be local or yarn", "paramRequired": true},
 {"paramName":"r", "paramLongName":"relationPath", "paramDescription": "the relation resolved Path", "paramRequired": true},
 {"paramName":"s", "paramLongName":"summaryPath", "paramDescription": "the summary Path", "paramRequired": true},
-{"paramName":"t", "paramLongName":"targetPath", "paramDescription": "the target base path of the scholix", "paramRequired": true}
+{"paramName":"t", "paramLongName":"targetPath", "paramDescription": "the target base path of the scholix", "paramRequired": true},
+{"paramName":"dc", "paramLongName":"dumpCitations", "paramDescription": "should dump citation relations", "paramRequired": false}
 ]
@@ -16,7 +16,11 @@
             <name>maxNumberOfPid</name>
             <description>filter relation with at least #maxNumberOfPid</description>
         </property>
+        <property>
+            <name>dumpCitations</name>
+            <value>false</value>
+            <description>should dump citation relations</description>
+        </property>
     </parameters>
 
     <start to="ImportDatasetEntities"/>
@@ -98,6 +102,7 @@
             <arg>--summaryPath</arg><arg>${targetPath}/provision/summaries</arg>
             <arg>--targetPath</arg><arg>${targetPath}/provision/scholix</arg>
             <arg>--relationPath</arg><arg>${targetPath}/relation</arg>
+            <arg>--dumpCitations</arg><arg>${dumpCitations}</arg>
         </spark>
         <ok to="DropJSONPath"/>
         <error to="Kill"/>
@@ -135,11 +140,21 @@
             <arg>--objectType</arg><arg>scholix</arg>
             <arg>--maxPidNumberFilter</arg><arg>maxNumberOfPid</arg>
         </spark>
+        <ok to="make_tar"/>
+        <error to="Kill"/>
+    </action>
+
+    <action name="make_tar">
+        <java>
+            <main-class>eu.dnetlib.dhp.common.MakeTarArchive</main-class>
+            <arg>--nameNode</arg><arg>${nameNode}</arg>
+            <arg>--hdfsPath</arg><arg>${targetPath}/tar</arg>
+            <arg>--sourcePath</arg><arg>${targetPath}/json</arg>
+        </java>
         <ok to="End"/>
         <error to="Kill"/>
     </action>
 
     <end name="End"/>
 
 </workflow-app>
@@ -1,7 +1,10 @@
 package eu.dnetlib.dhp.sx.graph
 
 import com.fasterxml.jackson.databind.ObjectMapper
+import com.fasterxml.jackson.module.scala.DefaultScalaModule
+import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
 import eu.dnetlib.dhp.application.ArgumentApplicationParser
+import eu.dnetlib.dhp.schema.common.ModelConstants
 import eu.dnetlib.dhp.schema.oaf.{OtherResearchProduct, Publication, Relation, Result, Software, Dataset => OafDataset}
 import org.apache.commons.io.IOUtils
 import org.apache.commons.lang3.StringUtils
@@ -9,7 +12,8 @@ import org.apache.spark.SparkConf
 import org.apache.spark.sql.{Encoder, Encoders, SaveMode, SparkSession}
 import org.slf4j.{Logger, LoggerFactory}
 
-import scala.collection.JavaConverters._
+import scala.reflect.ClassTag
+import scala.util.Try
 
 object SparkConvertRDDtoDataset {
 
@@ -36,11 +40,12 @@ object SparkConvertRDDtoDataset {
     val t = parser.get("targetPath")
     log.info(s"targetPath -> $t")
 
-    val filterRelation = parser.get("filterRelation")
-    log.info(s"filterRelation -> $filterRelation")
+    val subRelTypeFilter = parser.get("filterRelation")
+    log.info(s"filterRelation -> $subRelTypeFilter")
 
     val entityPath = s"$t/entities"
     val relPath = s"$t/relation"
 
     val mapper = new ObjectMapper()
     implicit val datasetEncoder: Encoder[OafDataset] = Encoders.kryo(classOf[OafDataset])
     implicit val publicationEncoder: Encoder[Publication] = Encoders.kryo(classOf[Publication])
@@ -99,44 +104,66 @@ object SparkConvertRDDtoDataset {
 
     log.info("Converting Relation")
 
-    if (filterRelation != null && StringUtils.isNoneBlank(filterRelation)) {
+    val relClassFilter = List(
+      ModelConstants.MERGES,
+      ModelConstants.IS_MERGED_IN,
+      ModelConstants.HAS_AMONG_TOP_N_SIMILAR_DOCS,
+      ModelConstants.IS_AMONG_TOP_N_SIMILAR_DOCS
+    )
 
     val rddRelation = spark.sparkContext
       .textFile(s"$sourcePath/relation")
       .map(s => mapper.readValue(s, classOf[Relation]))
-      .filter(r => r.getDataInfo != null && r.getDataInfo.getDeletedbyinference == false)
+      .filter(r => r.getDataInfo != null && !r.getDataInfo.getDeletedbyinference)
       .filter(r => r.getSource.startsWith("50") && r.getTarget.startsWith("50"))
-      //filter OpenCitations relations
-      .filter(r =>
-        r.getCollectedfrom != null && r.getCollectedfrom.size() > 0 && !r.getCollectedfrom.asScala.exists(k =>
-          "opencitations".equalsIgnoreCase(k.getValue)
-        )
-      )
-      .filter(r => r.getSubRelType != null && r.getSubRelType.equalsIgnoreCase(filterRelation))
-      spark.createDataset(rddRelation).as[Relation].write.mode(SaveMode.Overwrite).save(s"$relPath")
-    } else {
-
-      val relationSemanticFilter = List(
-        "merges",
-        "ismergedin",
-        "HasAmongTopNSimilarDocuments",
-        "IsAmongTopNSimilarDocuments"
-      )
-
-      val rddRelation = spark.sparkContext
-        .textFile(s"$sourcePath/relation")
-        .map(s => mapper.readValue(s, classOf[Relation]))
-        .filter(r => r.getDataInfo != null && r.getDataInfo.getDeletedbyinference == false)
-        .filter(r => r.getSource.startsWith("50") && r.getTarget.startsWith("50"))
-        //filter OpenCitations relations
-        .filter(r =>
-          r.getCollectedfrom != null && r.getCollectedfrom.size() > 0 && !r.getCollectedfrom.asScala.exists(k =>
-            "opencitations".equalsIgnoreCase(k.getValue)
-          )
-        )
-        .filter(r => !relationSemanticFilter.exists(k => k.equalsIgnoreCase(r.getRelClass)))
-      spark.createDataset(rddRelation).as[Relation].write.mode(SaveMode.Overwrite).save(s"$relPath")
-    }
+      .filter(r => filterRelations(subRelTypeFilter, relClassFilter, r))
+      //filter OpenCitations relations
+      .filter(r =>
+        r.getDataInfo.getProvenanceaction != null &&
+          !"sysimport:crosswalk:opencitations".equals(r.getDataInfo.getProvenanceaction.getClassid)
+      )
+
+    spark.createDataset(rddRelation).as[Relation].write.mode(SaveMode.Overwrite).save(s"$relPath")
 
   }
 
+  private def filterRelations(subRelTypeFilter: String, relClassFilter: List[String], r: Relation): Boolean = {
+    if (StringUtils.isNotBlank(subRelTypeFilter)) {
+      subRelTypeFilter.equalsIgnoreCase(r.getSubRelType)
+    } else {
+      !relClassFilter.exists(k => k.equalsIgnoreCase(r.getRelClass))
+    }
+  }
+
+  /*
+  //TODO: finalise implementation
+  private def processResult[T<: Result](
+    implicit ct: ClassTag[T],
+    log: Logger,
+    spark: SparkSession,
+    sourcePath: String,
+    entityPath: String,
+    clazz: Class[T]
+  ): Unit = {
+    val entityType = clazz.getSimpleName.toLowerCase
+
+    log.info(s"Converting $entityType")
+
+    val mapper = new ObjectMapper() with ScalaObjectMapper
+    mapper.registerModule(DefaultScalaModule)
+
+    val rdd = spark.sparkContext
+      .textFile(s"$sourcePath/$entityType")
+      .map(s => mapper.readValue(s, clazz))
+      .filter(r => r.getDataInfo != null && !r.getDataInfo.getDeletedbyinference);
+
+    implicit val encoder: Encoder[T] = Encoders.kryo(clazz)
+    spark
+      .createDataset(rdd)
+      .as[T]
+      .write
+      .mode(SaveMode.Overwrite)
+      .save(s"$entityPath/$entityType")
+  }
+   */
+
 }
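For clarity, a small self-contained sketch of the selection rule the new filterRelations helper encodes: when a subRelType filter is supplied, only relations of that subRelType are kept; otherwise relations whose relClass is in the merge/similarity deny-list are dropped. The Rel case class and the sample relClass values are illustrative stand-ins for eu.dnetlib.dhp.schema.oaf.Relation; the deny-list strings are taken from the removed code and compare case-insensitively with the ModelConstants values used above.

    import org.apache.commons.lang3.StringUtils

    object FilterRelationsSketch {

      // Stand-in for eu.dnetlib.dhp.schema.oaf.Relation (illustrative only).
      final case class Rel(subRelType: String, relClass: String)

      // Same shape as the new filterRelations helper in SparkConvertRDDtoDataset.
      def filterRelations(subRelTypeFilter: String, relClassFilter: List[String], r: Rel): Boolean =
        if (StringUtils.isNotBlank(subRelTypeFilter))
          subRelTypeFilter.equalsIgnoreCase(r.subRelType)
        else
          !relClassFilter.exists(k => k.equalsIgnoreCase(r.relClass))

      def main(args: Array[String]): Unit = {
        val denyList = List("merges", "ismergedin", "HasAmongTopNSimilarDocuments", "IsAmongTopNSimilarDocuments")

        // With no subRelType filter, dedup/similarity relations are dropped.
        println(filterRelations(null, denyList, Rel("dedup", "merges")))         // false
        println(filterRelations(null, denyList, Rel("citation", "Cites")))       // true

        // With a subRelType filter, only that subRelType survives.
        println(filterRelations("citation", denyList, Rel("citation", "Cites"))) // true
        println(filterRelations("citation", denyList, Rel("dedup", "merges")))   // false
      }
    }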
@@ -12,6 +12,8 @@ import org.apache.spark.sql.functions.count
 import org.apache.spark.sql._
 import org.slf4j.{Logger, LoggerFactory}
 
+import scala.util.Try
+
 object SparkCreateScholix {
 
   def main(args: Array[String]): Unit = {
@@ -37,6 +39,8 @@ object SparkCreateScholix {
     log.info(s"summaryPath -> $summaryPath")
     val targetPath = parser.get("targetPath")
     log.info(s"targetPath -> $targetPath")
+    val dumpCitations = Try(parser.get("dumpCitations").toBoolean).getOrElse(false)
+    log.info(s"dumpCitations -> $dumpCitations")
 
     implicit val relEncoder: Encoder[Relation] = Encoders.kryo[Relation]
     implicit val summaryEncoder: Encoder[ScholixSummary] = Encoders.kryo[ScholixSummary]
@@ -138,7 +142,7 @@ object SparkCreateScholix {
     val relatedEntitiesDS: Dataset[RelatedEntities] = spark.read
       .load(s"$targetPath/related_entities")
      .as[RelatedEntities]
-      .filter(r => r.relatedPublication > 0 || r.relatedDataset > 0)
+      .filter(r => dumpCitations || r.relatedPublication > 0 || r.relatedDataset > 0)
 
     relatedEntitiesDS
       .joinWith(summaryDS, relatedEntitiesDS("id").equalTo(summaryDS("_1")), "inner")
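A small sketch of how the new dumpCitations flag behaves: the Try(...).getOrElse(false) parsing means a missing or malformed parameter silently defaults to false, and when the flag is true the related-entities filter lets every record through regardless of its publication/dataset counts. The RelatedEntities stand-in and sample rows below are illustrative; only the parsing and filter expressions come from the diff above.

    import scala.util.Try

    object DumpCitationsFlagSketch {

      // Illustrative stand-in for the RelatedEntities rows filtered in SparkCreateScholix.
      final case class RelatedEntities(id: String, relatedPublication: Int, relatedDataset: Int)

      def main(args: Array[String]): Unit = {
        // Same parsing as the new code: absent or unparsable values default to false.
        def parseFlag(raw: String): Boolean = Try(raw.toBoolean).getOrElse(false)

        println(parseFlag(null))    // false (parameter not supplied)
        println(parseFlag("yes"))   // false (not a valid Boolean literal)
        println(parseFlag("true"))  // true

        val rows = Seq(RelatedEntities("a", 0, 0), RelatedEntities("b", 2, 0))

        // Same predicate as the updated filter on relatedEntitiesDS.
        def keep(dumpCitations: Boolean)(r: RelatedEntities): Boolean =
          dumpCitations || r.relatedPublication > 0 || r.relatedDataset > 0

        println(rows.filter(keep(dumpCitations = false)).map(_.id)) // List(b)
        println(rows.filter(keep(dumpCitations = true)).map(_.id))  // List(a, b)
      }
    }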