[personEntity] updated the property file to also include the db parameters. The same for the wf definition. Refactoring for compilation

This commit is contained in:
Miriam Baglioni 2024-10-22 10:13:30 +02:00
parent 09a2c93fc7
commit 821540f94a
6 changed files with 95 additions and 54 deletions

View File

@@ -61,8 +61,7 @@ public class CoAuthorshipIterator implements Iterator<Relation> {
 	private Relation getRelation(String orcid1, String orcid2) {
 		String source = PERSON_PREFIX + IdentifierFactory.md5(orcid1);
 		String target = PERSON_PREFIX + IdentifierFactory.md5(orcid2);
-		Relation relation =
-			OafMapperUtils
+		Relation relation = OafMapperUtils
 			.getRelation(
 				source, target, ModelConstants.PERSON_PERSON_RELTYPE,
 				ModelConstants.PERSON_PERSON_SUBRELTYPE,
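For orientation, the person identifiers built here are a fixed prefix followed by an MD5 hash of the ORCID iD. Below is a minimal, self-contained sketch of that composition; the prefix value is hypothetical and the md5 helper is only a stand-in for IdentifierFactory.md5, not the committed code:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

public class PersonIdSketch {

	// Hypothetical prefix, used only for illustration; the real PERSON_PREFIX constant may differ.
	private static final String PERSON_PREFIX = "30|orcid_______::";

	// Stand-in for IdentifierFactory.md5: lower-case hex MD5 of the input string.
	static String md5(String s) throws Exception {
		MessageDigest md = MessageDigest.getInstance("MD5");
		StringBuilder sb = new StringBuilder();
		for (byte b : md.digest(s.getBytes(StandardCharsets.UTF_8)))
			sb.append(String.format("%02x", b));
		return sb.toString();
	}

	public static void main(String[] args) throws Exception {
		// an ORCID iD becomes "<prefix><32-char md5>"
		System.out.println(PERSON_PREFIX + md5("0000-0002-1825-0097"));
	}
}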

View File

@@ -13,9 +13,12 @@ import java.sql.SQLException;
 import java.util.*;
 import java.util.stream.Collectors;
-import eu.dnetlib.dhp.common.DbClient;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.BZip2Codec;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
@@ -27,13 +30,13 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.spark_project.jetty.util.StringUtil;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import eu.dnetlib.dhp.application.ArgumentApplicationParser;
 import eu.dnetlib.dhp.collection.orcid.model.Author;
 import eu.dnetlib.dhp.collection.orcid.model.Employment;
 import eu.dnetlib.dhp.collection.orcid.model.Work;
+import eu.dnetlib.dhp.common.DbClient;
 import eu.dnetlib.dhp.common.HdfsSupport;
 import eu.dnetlib.dhp.schema.action.AtomicAction;
 import eu.dnetlib.dhp.schema.common.ModelConstants;
@@ -49,11 +52,6 @@ import eu.dnetlib.dhp.schema.oaf.utils.PidType;
 import eu.dnetlib.dhp.utils.DHPUtils;
 import scala.Tuple2;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;

 public class ExtractPerson implements Serializable {
 	private static final Logger log = LoggerFactory.getLogger(ExtractPerson.class);
 	private static final String QUERY = "SELECT * FROM project_person WHERE pid_type = 'ORCID'";
@@ -77,8 +75,7 @@ public class ExtractPerson implements Serializable {
 	public static final String OPENAIRE_DATASOURCE_NAME = "OpenAIRE";
 	public static List<KeyValue> collectedfromOpenAIRE = OafMapperUtils
 		.listKeyValues(OPENAIRE_DATASOURCE_ID, OPENAIRE_DATASOURCE_NAME);
 	public static final DataInfo DATAINFO = OafMapperUtils
 		.dataInfo(
@@ -136,14 +133,15 @@ public class ExtractPerson implements Serializable {
 			spark -> {
 				HdfsSupport.remove(outputPath, spark.sparkContext().hadoopConfiguration());
 				extractInfoForActionSetFromORCID(spark, inputPath, workingDir);
-				extractInfoForActionSetFromProjects(spark, inputPath, workingDir, dbUrl, dbUser, dbPassword, workingDir + "/project", hdfsNameNode);
+				extractInfoForActionSetFromProjects(
+					spark, inputPath, workingDir, dbUrl, dbUser, dbPassword, workingDir + "/project", hdfsNameNode);
 				createActionSet(spark, outputPath, workingDir);
 			});
 	}

 	private static void extractInfoForActionSetFromProjects(SparkSession spark, String inputPath, String workingDir,
 		String dbUrl, String dbUser, String dbPassword, String hdfsPath, String hdfsNameNode) throws IOException {
 		Configuration conf = new Configuration();
 		conf.set("fs.defaultFS", hdfsNameNode);
@@ -164,41 +162,40 @@ public class ExtractPerson implements Serializable {
 	public static Relation getRelationWithProject(ResultSet rs) {
 		try {
-			return getProjectRelation(rs.getString("project"), rs.getString("pid"),
-				rs.getString("role"));
+			return getProjectRelation(
+				rs.getString("project"), rs.getString("pid"),
+				rs.getString("role"));
 		} catch (final SQLException e) {
 			throw new RuntimeException(e);
 		}
 	}

 	private static Relation getProjectRelation(String project, String orcid, String role) {
 		String source = PERSON_PREFIX + "::" + IdentifierFactory.md5(orcid);
-		String target = project.substring(0,14)
+		String target = project.substring(0, 14)
 			+ IdentifierFactory.md5(project.substring(15));
 		List<KeyValue> properties = new ArrayList<>();
 		Relation relation = OafMapperUtils
 			.getRelation(
 				source, target, ModelConstants.PROJECT_PERSON_RELTYPE, ModelConstants.PROJECT_PERSON_SUBRELTYPE,
 				ModelConstants.PROJECT_PERSON_PARTICIPATES,
 				collectedfromOpenAIRE,
 				DATAINFO,
 				null);
 		relation.setValidated(true);
 		if (StringUtil.isNotBlank(role)) {
 			KeyValue kv = new KeyValue();
 			kv.setKey("role");
 			kv.setValue(role);
 			properties.add(kv);
 		}
-		if (!properties.isEmpty())
-			relation.setProperties(properties);
-		return relation;
+		if (!properties.isEmpty())
+			relation.setProperties(properties);
+		return relation;
 	}
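For orientation, a hedged usage sketch of getProjectRelation; every literal below is a placeholder invented for illustration, not data taken from the project_person table:

		// Placeholder arguments: the real values come from the project_person result set.
		Relation r = getProjectRelation("<project-id-from-db>", "0000-0002-1825-0097", "Principal Investigator");
		// r carries relType PROJECT_PERSON_RELTYPE, subRelType PROJECT_PERSON_SUBRELTYPE,
		// relClass PROJECT_PERSON_PARTICIPATES, validated = true, and, since the role is not blank,
		// a single "role" property holding that value.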
@@ -211,7 +208,7 @@ public class ExtractPerson implements Serializable {
 		}
 	}

-	private static void createActionSet(SparkSession spark,String outputPath, String workingDir) {
+	private static void createActionSet(SparkSession spark, String outputPath, String workingDir) {
 		Dataset<Person> people;
 		people = spark
@@ -221,7 +218,7 @@ public class ExtractPerson implements Serializable {
 				(MapFunction<String, Person>) value -> OBJECT_MAPPER
 					.readValue(value, Person.class),
 				Encoders.bean(Person.class));
 		people
 			.toJavaRDD()
 			.map(p -> new AtomicAction(p.getClass(), p))
@@ -235,10 +232,10 @@ public class ExtractPerson implements Serializable {
 				getRelations(spark, workingDir + "/affiliation")
 					.toJavaRDD()
 					.map(r -> new AtomicAction(r.getClass(), r)))
 			.union(
 				getRelations(spark, workingDir + "/project")
 					.toJavaRDD()
 					.map(r -> new AtomicAction(r.getClass(), r)))
 			.mapToPair(
 				aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
 					new Text(OBJECT_MAPPER.writeValueAsString(aa))))
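The hunk stops at mapToPair, so the final write is not visible here. In other dnet-hadoop action sets the paired RDD is stored as a BZip2-compressed sequence file, so the assumed continuation of createActionSet would look roughly like the following sketch (not the committed lines; the imports for Text, SequenceFileOutputFormat and BZip2Codec already appear above):

			// assumed tail of createActionSet: store the <class name, JSON-serialized AtomicAction>
			// pairs as a BZip2-compressed sequence file at outputPath
			.saveAsHadoopFile(
				outputPath,
				Text.class,
				Text.class,
				SequenceFileOutputFormat.class,
				BZip2Codec.class);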
@@ -276,7 +273,7 @@ public class ExtractPerson implements Serializable {
 			.joinWith(authors, employmentDataset.col("orcid").equalTo(authors.col("orcid")))
 			.map((MapFunction<Tuple2<Employment, Author>, Employment>) t2 -> t2._1(), Encoders.bean(Employment.class));

-		//Mapping all the orcid profiles even if the profile has no visible works
+		// Mapping all the orcid profiles even if the profile has no visible works
 		authors.map((MapFunction<Author, Person>) op -> {
 			Person person = new Person();
@@ -509,13 +506,13 @@ public class ExtractPerson implements Serializable {
 			return null;
 		}
 		Relation relation = OafMapperUtils
 			.getRelation(
 				source, target, ModelConstants.RESULT_PERSON_RELTYPE,
 				ModelConstants.RESULT_PERSON_SUBRELTYPE,
 				ModelConstants.RESULT_PERSON_HASAUTHORED,
 				Arrays.asList(OafMapperUtils.keyValue(orcidKey, ModelConstants.ORCID_DS)),
 				DATAINFO,
 				null);
 		relation.setValidated(true);
 		return relation;
 	}

View File

@@ -21,5 +21,30 @@
 		"paramLongName": "workingDir",
 		"paramDescription": "the hdfs name node",
 		"paramRequired": false
+	},
+	{
+		"paramName": "pu",
+		"paramLongName": "postgresUrl",
+		"paramDescription": "the postgres URL",
+		"paramRequired": false
+	},
+	{
+		"paramName": "ps",
+		"paramLongName": "postgresUser",
+		"paramDescription": "the postgres user",
+		"paramRequired": false
+	},
+	{
+		"paramName": "pp",
+		"paramLongName": "postgresPassword",
+		"paramDescription": "the postgres password",
+		"paramRequired": false
+	},
+	{
+		"paramName": "nn",
+		"paramLongName": "hdfsNameNode",
+		"paramDescription": "the hdfs name node",
+		"paramRequired": false
 	}
 ]
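For context, a hedged sketch of how these new parameters would be read in ExtractPerson.main via ArgumentApplicationParser; the resource path of the JSON definition is assumed, not taken from the commit:

		// Sketch only: the classpath location of the parameter file is an assumption.
		final ArgumentApplicationParser parser = new ArgumentApplicationParser(
			IOUtils
				.toString(
					ExtractPerson.class
						.getResourceAsStream("/eu/dnetlib/dhp/actionmanager/personentity/as_parameters.json"),
					StandardCharsets.UTF_8));
		parser.parseArgument(args);

		final String dbUrl = parser.get("postgresUrl");
		final String dbUser = parser.get("postgresUser");
		final String dbPassword = parser.get("postgresPassword");
		final String hdfsNameNode = parser.get("hdfsNameNode");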

View File

@@ -1,2 +1,5 @@
 inputPath=/data/orcid_2023/tables/
 outputPath=/user/miriam.baglioni/peopleAS
+postgresUrl=jdbc:postgresql://beta.services.openaire.eu:5432/dnet_openaireplus
+postgresUser=dnet'
+postgresPassword=dnetPwd

View File

@@ -9,6 +9,18 @@
 		<name>outputPath</name>
 		<description>the path where to store the actionset</description>
 	</property>
+	<property>
+		<name>postgresUrl</name>
+		<description>the URL of the postgres database</description>
+	</property>
+	<property>
+		<name>postgresUser</name>
+		<description>the postgres user</description>
+	</property>
+	<property>
+		<name>postgresPassword</name>
+		<description>the postgres password</description>
+	</property>
 	<property>
 		<name>sparkDriverMemory</name>
 		<description>memory for driver process</description>
@@ -102,6 +114,10 @@
 			<arg>--inputPath</arg><arg>${inputPath}</arg>
 			<arg>--outputPath</arg><arg>${outputPath}</arg>
 			<arg>--workingDir</arg><arg>${workingDir}</arg>
+			<arg>--hdfsNameNode</arg><arg>${nameNode}</arg>
+			<arg>--postgresUrl</arg><arg>${postgresUrl}</arg>
+			<arg>--postgresUser</arg><arg>${postgresUser}</arg>
+			<arg>--postgresPassword</arg><arg>${postgresPassword}</arg>
 		</spark>
 		<ok to="End"/>
 		<error to="Kill"/>

View File

@@ -31,6 +31,7 @@ class ORCIDAuthorMatchersTest {
     assertTrue(matchOrderedTokenAndAbbreviations("孙林 Sun Lin", "Sun Lin"))
     // assertTrue(AuthorsMatchRevised.compare("孙林 Sun Lin", "孙林")); // not yet implemented
   }
+
   @Test def testDocumentationNames(): Unit = {
     assertTrue(matchOrderedTokenAndAbbreviations("James C. A. Miller-Jones", "James Antony Miller-Jones"))
   }