updated unpaywall mapping

commit 7b28783fb4 (parent b32655e48e)
SparkGenerateScholix.java

@@ -3,16 +3,20 @@ package eu.dnetlib.dhp.provision;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.provision.scholix.Scholix;
-import eu.dnetlib.dhp.utils.DHPUtils;
+import eu.dnetlib.dhp.provision.scholix.ScholixResource;
+import eu.dnetlib.dhp.provision.scholix.summary.ScholixSummary;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.spark.api.java.JavaPairRDD;
-import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
-import org.apache.spark.api.java.function.PairFunction;
+import org.apache.spark.api.java.function.PairFlatMapFunction;
 import org.apache.spark.sql.SparkSession;
 import scala.Tuple2;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+
 public class SparkGenerateScholix {
 
     private static final String jsonIDPath = "$.id";

@@ -21,6 +25,8 @@ public class SparkGenerateScholix {
 
 
+
+
     public static void main(String[] args) throws Exception {
         final ArgumentApplicationParser parser = new ArgumentApplicationParser(IOUtils.toString(SparkGenerateScholix.class.getResourceAsStream("/eu/dnetlib/dhp/provision/input_generate_summary_parameters.json")));
         parser.parseArgument(args);
@@ -37,29 +43,48 @@ public class SparkGenerateScholix {
         final JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());
 
 
-        final JavaRDD<String> relationToExport = sc.textFile(graphPath + "/relation").filter(ProvisionUtil::isNotDeleted).repartition(4000);
-        final JavaPairRDD<String,String> scholixSummary = sc.textFile(workingDirPath + "/summary").mapToPair((PairFunction<String, String, String>) i -> new Tuple2<>(DHPUtils.getJPathString(jsonIDPath, i), i));
-        scholixSummary.join(
-                relationToExport
-                        .mapToPair((PairFunction<String, String, String>) i -> new Tuple2<>(DHPUtils.getJPathString(sourceIDPath, i), i)))
-                .map(Tuple2::_2)
-                .mapToPair(summaryRelation ->
-                        new Tuple2<>(
-                                DHPUtils.getJPathString(targetIDPath, summaryRelation._2()),
-                                Scholix.generateScholixWithSource(summaryRelation._1(), summaryRelation._2())))
-//                .join(scholixSummary)
+//        final JavaRDD<String> relationToExport = sc.textFile(graphPath + "/relation").filter(ProvisionUtil::isNotDeleted).repartition(4000);
+        final JavaPairRDD<String,ScholixResource> scholixSummary =
+                sc.textFile(workingDirPath + "/summary")
+                        .flatMapToPair((PairFlatMapFunction<String, String, ScholixResource>) i -> {
+                            final ObjectMapper mapper = new ObjectMapper();
+                            final ScholixSummary summary = mapper.readValue(i, ScholixSummary.class);
+                            ScholixResource tmp = ScholixResource.fromSummary(summary);
+                            final List<Tuple2<String, ScholixResource>> result = new ArrayList<>();
+                            for (int k = 0; k<10; k++)
+                                result.add(new Tuple2<>(String.format("%s::%d", tmp.getDnetIdentifier(), k), tmp));
+                            return result.iterator();
+                        });
+//        scholixSummary.join(
+//                relationToExport
+//                        .mapToPair((PairFunction<String, String, String>) i -> new Tuple2<>(DHPUtils.getJPathString(sourceIDPath, i), i)))
 //                .map(Tuple2::_2)
-//                .map(i -> i._1().addTarget(i._2()))
-                .map(s-> {
+//                .mapToPair(summaryRelation ->
+//                        new Tuple2<>(
+//                                DHPUtils.getJPathString(targetIDPath, summaryRelation._2()),
+//                                Scholix.generateScholixWithSource(summaryRelation._1(), summaryRelation._2())))
+//
+//                .map(t-> t._2().setTarget(new ScholixResource().setDnetIdentifier(t._1())))
+//                .map(s-> {
+//                    ObjectMapper mapper = new ObjectMapper();
+//                    return mapper.writeValueAsString(s);
+//                })
+//                .saveAsTextFile(workingDirPath + "/scholix", GzipCodec.class);
+
+        sc.textFile(workingDirPath + "/scholix")
+                .mapToPair(t -> {
                     ObjectMapper mapper = new ObjectMapper();
-                    return mapper.writeValueAsString(s);
+                    Scholix scholix = mapper.readValue(t, Scholix.class);
+                    Random rand = new Random();
+                    return new Tuple2<>(String.format("%s::%d",scholix.getTarget().getDnetIdentifier(), rand.nextInt(10)), scholix);
                 })
-                .saveAsTextFile(workingDirPath + "/scholix", GzipCodec.class);
+                .join(scholixSummary)
+                .map(t-> {
+                    Scholix item = t._2()._1().setTarget(t._2()._2());
+                    item.generateIdentifier();
+                    return item;
+                })
+                .map(s-> new ObjectMapper().writeValueAsString(s)).saveAsTextFile(workingDirPath + "/scholix_index", GzipCodec.class);
     }
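The rewrite above is a salted join: the summary side is replicated under ten deterministic keys (`dnetIdentifier::0` … `::9`), while each scholix record draws a random salt in the same range, so relations pointing at a hot target spread across ten join keys instead of piling onto one. A minimal self-contained sketch of the same pattern, with placeholder String payloads rather than the project's Scholix/ScholixResource types:

    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.api.java.function.PairFlatMapFunction;
    import org.apache.spark.api.java.function.PairFunction;
    import org.apache.spark.sql.SparkSession;
    import scala.Tuple2;

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.Random;

    public class SaltedJoinSketch {
        static final int SALTS = 10; // replication factor: small-side memory traded for hot-key parallelism

        public static void main(String[] args) {
            SparkSession spark = SparkSession.builder().master("local[*]").appName("salted-join").getOrCreate();
            JavaSparkContext sc = new JavaSparkContext(spark.sparkContext());

            // Small side: replicate each record under every salted key "id::k".
            JavaPairRDD<String, String> summaries = sc.parallelize(Arrays.asList("id1|summary1"))
                    .flatMapToPair((PairFlatMapFunction<String, String, String>) s -> {
                        String[] p = s.split("\\|");
                        List<Tuple2<String, String>> out = new ArrayList<>();
                        for (int k = 0; k < SALTS; k++)
                            out.add(new Tuple2<>(String.format("%s::%d", p[0], k), p[1]));
                        return out.iterator();
                    });

            // Big side: each record picks one random replica, so a hot id lands on SALTS distinct keys.
            JavaPairRDD<String, String> scholix = sc.parallelize(Arrays.asList("id1|rel1", "id1|rel2"))
                    .mapToPair((PairFunction<String, String, String>) s -> {
                        String[] p = s.split("\\|");
                        return new Tuple2<>(String.format("%s::%d", p[0], new Random().nextInt(SALTS)), p[1]);
                    });

            // Every replica carries the same payload, so joining against any one of them is equivalent.
            scholix.join(summaries).values().collect().forEach(System.out::println);
            spark.stop();
        }
    }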
SparkIndexCollectionOnES.java

@@ -24,6 +24,7 @@ public class SparkIndexCollectionOnES {
 
         final String sourcePath = parser.get("sourcePath");
         final String index = parser.get("index");
+        final String idPath = parser.get("idPath");
 
         final SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
 

@@ -34,7 +35,7 @@ public class SparkIndexCollectionOnES {
 
         Map<String, String> esCfg = new HashMap<>();
         esCfg.put("es.nodes", "10.19.65.51, 10.19.65.52, 10.19.65.53, 10.19.65.54");
-        esCfg.put("es.mapping.id", "id");
+        esCfg.put("es.mapping.id", idPath);
         esCfg.put("es.batch.write.retry.count", "8");
         esCfg.put("es.batch.write.retry.wait", "60s");
         esCfg.put("es.batch.size.entries", "200");
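With `es.mapping.id` now read from the new `idPath` argument, one indexing class can serve both indexes even though their documents carry the id in different fields. A sketch of how elasticsearch-hadoop consumes these settings, assuming the documents arrive as pre-serialized JSON lines (`JavaEsSpark.saveJsonToEs` is the library's entry point for that case; host and index names here are placeholders, not the values from the diff):

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.elasticsearch.spark.rdd.api.java.JavaEsSpark;

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class EsIndexSketch {
        public static void main(String[] args) {
            JavaSparkContext sc = new JavaSparkContext(
                    new SparkConf().setMaster("local[*]").setAppName("es-index-sketch"));

            // One pre-serialized JSON document per line, as the provision jobs produce.
            JavaRDD<String> docs = sc.parallelize(Arrays.asList(
                    "{\"identifier\":\"abc\",\"relationship\":{\"name\":\"references\"}}"));

            Map<String, String> esCfg = new HashMap<>();
            esCfg.put("es.nodes", "localhost");       // placeholder; the diff hard-codes four cluster IPs
            esCfg.put("es.mapping.id", "identifier"); // JSON field used as the ES _id, parameterized as idPath above
            esCfg.put("es.batch.write.retry.count", "8");

            // Bulk-indexes the RDD into the named index.
            JavaEsSpark.saveJsonToEs(docs, "myindex/doc", esCfg);
            sc.stop();
        }
    }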
Scholix.java

@@ -47,7 +47,7 @@ public class Scholix implements Serializable {
     }
 
 
-    private void generateIdentifier( ) {
+    public void generateIdentifier( ) {
         setIdentifier(DHPUtils.md5(String.format("%s::%s::%s",source.getDnetIdentifier(),relationship.getName(), target.getDnetIdentifier())));
 
     }
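`generateIdentifier` becomes public because SparkGenerateScholix now swaps in the resolved target after the join and must recompute the record id: the identifier is an MD5 over `source::relationName::target`, so it goes stale whenever the target changes. `DHPUtils.md5` itself is not part of this diff; a sketch of the equivalent derivation, assuming the usual lowercase-hex MD5 convention:

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;

    public class ScholixIdSketch {
        // Equivalent of DHPUtils.md5 under the common convention: lowercase hex MD5 digest.
        static String md5(String s) throws Exception {
            MessageDigest md = MessageDigest.getInstance("MD5");
            StringBuilder hex = new StringBuilder();
            for (byte b : md.digest(s.getBytes(StandardCharsets.UTF_8)))
                hex.append(String.format("%02x", b));
            return hex.toString();
        }

        public static void main(String[] args) throws Exception {
            // The Scholix identifier concatenates source id, relation name and target id (ids illustrative).
            System.out.println(md5(String.format("%s::%s::%s", "50::abc", "references", "50::def")));
        }
    }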
Oozie workflow definition

@@ -25,9 +25,19 @@
         <description>number of cores used by single executor</description>
     </property>
+    <property>
+        <name>idScholix</name>
+        <description>the </description>
+    </property>
+    <property>
+        <name>idSummary</name>
+        <description>number of cores used by single executor</description>
+    </property>
 </parameters>
 
-    <start to="generateScholix"/>
+    <start to="indexScholix"/>
 
     <kill name="Kill">
         <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>

@@ -103,7 +113,7 @@
             <name-node>${nameNode}</name-node>
             <master>yarn-cluster</master>
             <mode>cluster</mode>
-            <name>generate Summary</name>
+            <name>generate Scholix</name>
             <class>eu.dnetlib.dhp.provision.SparkGenerateScholix</class>
             <jar>dhp-graph-provision-${projectVersion}.jar</jar>
             <spark-opts>--executor-memory ${sparkExecutorMemory} --driver-memory=${sparkDriverMemory} ${sparkExtraOPT}</spark-opts>

@@ -111,9 +121,29 @@
             <arg>--workingDirPath</arg><arg>${workingDirPath}</arg>
             <arg>--graphPath</arg><arg>${graphPath}</arg>
         </spark>
+        <ok to="indexScholix"/>
+        <error to="Kill"/>
+    </action>
+
+    <action name="indexScholix">
+        <spark xmlns="uri:oozie:spark-action:0.2">
+            <job-tracker>${jobTracker}</job-tracker>
+            <name-node>${nameNode}</name-node>
+            <master>yarn-cluster</master>
+            <mode>cluster</mode>
+            <name>index scholix</name>
+            <class>eu.dnetlib.dhp.provision.SparkIndexCollectionOnES</class>
+            <jar>dhp-graph-provision-${projectVersion}.jar</jar>
+            <spark-opts>--executor-memory ${sparkExecutorMemory} --num-executors 20 --driver-memory=${sparkDriverMemory} ${sparkExtraOPT} --conf spark.dynamicAllocation.maxExecutors="32" </spark-opts>
+            <arg>-mt</arg> <arg>yarn-cluster</arg>
+            <arg>--sourcePath</arg><arg>${workingDirPath}/scholix_index</arg>
+            <arg>--index</arg><arg>${index}_scholix</arg>
+        </spark>
         <ok to="End"/>
         <error to="Kill"/>
     </action>
 
 
     <end name="End"/>
 </workflow-app>
SparkIndexCollectionOnES parameter definitions (JSON)

@@ -16,5 +16,11 @@
     "paramLongName": "index",
     "paramDescription": "the index name",
     "paramRequired": true
+  },
+  {
+    "paramName": "id",
+    "paramLongName": "idPath",
+    "paramDescription": "the identifier field name",
+    "paramRequired": true
   }
 ]
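Each JSON entry wires a short flag (`-id`) and a long name (`--idPath`) to a value the job later fetches by long name, which is exactly how SparkIndexCollectionOnES reads it above. A usage sketch of that call pattern; the resource file name here is illustrative, not the repository's actual path:

    import eu.dnetlib.dhp.application.ArgumentApplicationParser;
    import org.apache.commons.io.IOUtils;

    public class ParserUsageSketch {
        public static void main(String[] args) throws Exception {
            // Resource name illustrative; the real file lives under /eu/dnetlib/dhp/provision/.
            final ArgumentApplicationParser parser = new ArgumentApplicationParser(
                    IOUtils.toString(ParserUsageSketch.class
                            .getResourceAsStream("/eu/dnetlib/dhp/provision/index_parameters.json")));
            parser.parseArgument(args);                  // accepts both -id <value> and --idPath <value>
            final String idPath = parser.get("idPath");  // values are looked up by paramLongName
            System.out.println(idPath);
        }
    }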
ExtractInfoTest.java

@@ -1,5 +1,6 @@
 package eu.dnetlib.dhp.provision;
 
+import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.provision.scholix.Scholix;
 import eu.dnetlib.dhp.provision.scholix.summary.ScholixSummary;

@@ -7,14 +8,13 @@ import org.apache.commons.io.IOUtils;
 import org.junit.Ignore;
 import org.junit.Test;
-
+import scala.Tuple2;
 
 public class ExtractInfoTest {
 
     @Test
     public void test() throws Exception {
 
         final String json = IOUtils.toString(getClass().getResourceAsStream("record.json"));
 
-
         ProvisionUtil.getItemType(json,ProvisionUtil.TARGETJSONPATH);
 
     }

@@ -43,6 +43,7 @@ public class ExtractInfoTest {
     }
 
 
+
     @Test
     @Ignore
     public void testIndex() throws Exception {
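The rest of this last hunk did not load on the page; the visible change is the new `com.fasterxml.jackson.core.type.TypeReference` import (plus `scala.Tuple2`). Jackson needs a `TypeReference` to deserialize into parameterized types, since a plain `Class` token erases the type arguments. A sketch of the usual pattern; the payload is illustrative, not the test's actual fixture:

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.util.List;
    import java.util.Map;

    public class TypeReferenceSketch {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            // TypeReference preserves the full generic signature at deserialization time.
            List<Map<String, String>> rows = mapper.readValue(
                    "[{\"id\":\"60|xyz\"}]",
                    new TypeReference<List<Map<String, String>>>() {});
            System.out.println(rows.get(0).get("id"));
        }
    }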