
attributes fixes

Michele De Bonis 2024-12-05 14:39:42 +01:00
parent bde59a7c8f
commit 6af3fd16b6
21 changed files with 546 additions and 431 deletions

Changed file: DateRange.java

@@ -1,10 +1,5 @@
package eu.dnetlib.pace.tree;

-import com.wcohen.ss.AbstractStringDistance;
-import eu.dnetlib.pace.config.Config;
-import eu.dnetlib.pace.tree.support.AbstractStringComparator;
-import eu.dnetlib.pace.tree.support.ComparatorClass;
-import org.joda.time.DateTime;
import java.time.DateTimeException;
import java.time.LocalDate;

@@ -13,55 +8,62 @@ import java.time.format.DateTimeFormatter;
import java.util.Locale;
import java.util.Map;
+import org.joda.time.DateTime;
+
+import com.wcohen.ss.AbstractStringDistance;
+
+import eu.dnetlib.pace.config.Config;
+import eu.dnetlib.pace.tree.support.AbstractStringComparator;
+import eu.dnetlib.pace.tree.support.ComparatorClass;

@ComparatorClass("dateRange")
public class DateRange extends AbstractStringComparator {

	int YEAR_RANGE;

	public DateRange(Map<String, String> params) {
		super(params, new com.wcohen.ss.JaroWinkler());
		YEAR_RANGE = Integer.parseInt(params.getOrDefault("year_range", "3"));
	}

	public DateRange(final double weight) {
		super(weight, new com.wcohen.ss.JaroWinkler());
	}

	protected DateRange(final double weight, final AbstractStringDistance ssalgo) {
		super(weight, ssalgo);
	}

	public static boolean isNumeric(String str) {
		return str.matches("\\d+"); // match a number with optional '-' and decimal.
	}

	@Override
	public double distance(final String a, final String b, final Config conf) {
		if (a.isEmpty() || b.isEmpty()) {
			return -1.0; // return -1 if a field is missing
		}
		try {
			DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd", Locale.ENGLISH);
			LocalDate d1 = LocalDate.parse(a, formatter);
			LocalDate d2 = LocalDate.parse(b, formatter);
			Period period = Period.between(d1, d2);

			return period.getYears() <= YEAR_RANGE ? 1.0 : 0.0;
		} catch (DateTimeException e) {
			return -1.0;
		}
	}

	@Override
	public double getWeight() {
		return super.weight;
	}

	@Override
	protected double normalize(final double d) {
		return d;
	}
}
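
As a quick orientation for the comparator above, a minimal usage sketch (not part of the commit): it assumes the surrounding pace classes are on the classpath and passes null for the Config argument, which the distance() implementation above does not consult.

import java.util.HashMap;
import java.util.Map;

import eu.dnetlib.pace.tree.DateRange;

public class DateRangeExample {
	public static void main(String[] args) {
		Map<String, String> params = new HashMap<>();
		params.put("year_range", "3"); // maximum gap in years still considered a match

		DateRange comparator = new DateRange(params);

		// dates within 3 years of each other -> 1.0
		System.out.println(comparator.distance("2020-01-15", "2022-06-30", null));
		// more than 3 years apart -> 0.0
		System.out.println(comparator.distance("2015-01-01", "2022-06-30", null));
		// missing value -> -1.0 (treated as "cannot compare")
		System.out.println(comparator.distance("", "2022-06-30", null));
	}
}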

Changed file: JsonListMatch.java

@@ -62,7 +62,7 @@ public class JsonListMatch extends AbstractListComparator {
		Set<String> types = Sets.intersection(typesA, typesB);

		if (types.isEmpty()) // if no common type, it is impossible to compare
			return -1;

		ca = ca.stream().filter(s -> types.contains(s.split("::")[0])).collect(Collectors.toSet());
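
The step above keeps only PID entries whose type prefix (the part before "::") occurs on both sides, and bails out with -1 when there is no common type. A small self-contained sketch of just that step, with made-up input values and Guava's Sets.intersection as used in the class above (not part of the commit):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;

import com.google.common.collect.Sets;

public class CommonTypeFilterExample {
	public static void main(String[] args) {
		Set<String> ca = new HashSet<>(Arrays.asList("doi::10.1/a", "grid::grid_1"));
		Set<String> cb = new HashSet<>(Arrays.asList("doi::10.1/b", "ror::ror_1"));

		Set<String> typesA = ca.stream().map(s -> s.split("::")[0]).collect(Collectors.toSet());
		Set<String> typesB = cb.stream().map(s -> s.split("::")[0]).collect(Collectors.toSet());

		Set<String> types = Sets.intersection(typesA, typesB); // here: [doi]
		if (types.isEmpty()) {
			System.out.println(-1); // no common PID type: nothing to compare
			return;
		}

		// keep only the entries whose type is present on both sides
		ca = ca.stream().filter(s -> types.contains(s.split("::")[0])).collect(Collectors.toSet());
		cb = cb.stream().filter(s -> types.contains(s.split("::")[0])).collect(Collectors.toSet());
		System.out.println(ca + " vs " + cb);
	}
}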

Changed file: ComparatorTest.java

@@ -72,14 +72,34 @@ public class ComparatorTest extends AbstractPaceTest {

		CodeMatch codeMatch = new CodeMatch(params);

		// names have different codes
		assertEquals(
			0.0,
			codeMatch
				.distance(
					"physical oceanography at ctd station june 1998 ev02a",
					"physical oceanography at ctd station june 1998 ir02", conf));

		// names have same code
		assertEquals(
			1.0,
			codeMatch
				.distance(
					"physical oceanography at ctd station june 1998 ev02a",
					"physical oceanography at ctd station june 1998 ev02a", conf));

		// code is not in both names
		assertEquals(
			-1,
			codeMatch
				.distance(
					"physical oceanography at ctd station june 1998",
					"physical oceanography at ctd station june 1998 ev02a", conf));
		assertEquals(
			1.0,
			codeMatch
				.distance(
					"physical oceanography at ctd station june 1998", "physical oceanography at ctd station june 1998",
					conf));
	}

	@Test

@@ -275,7 +295,7 @@ public class ComparatorTest extends AbstractPaceTest {
			Arrays
				.asList(
					"{\"datainfo\":{\"deletedbyinference\":false,\"inferenceprovenance\":null,\"inferred\":false,\"invisible\":false,\"provenanceaction\":{\"classid\":\"sysimport:actionset\",\"classname\":\"Harvested\",\"schemeid\":\"dnet:provenanceActions\",\"schemename\":\"dnet:provenanceActions\"},\"trust\":\"0.9\"},\"qualifier\":{\"classid\":\"grid\",\"classname\":\"GRID Identifier\",\"schemeid\":\"dnet:pid_types\",\"schemename\":\"dnet:pid_types\"},\"value\":\"grid_1\"}",
					"{\"datainfo\":{\"deletedbyinference\":false,\"inferenceprovenance\":null,\"inferred\":false,\"invisible\":false,\"provenanceaction\":{\"classid\":\"sysimport:actionset\",\"classname\":\"Harvested\",\"schemeid\":\"dnet:provenanceActions\",\"schemename\":\"dnet:provenanceActions\"},\"trust\":\"0.9\"},\"qualifier\":{\"classid\":\"ror\",\"classname\":\"Research Organization Registry\",\"schemeid\":\"dnet:pid_types\",\"schemename\":\"dnet:pid_types\"},\"value\":\"ror_1\"}"),
			"authors");
		List<String> b = createFieldList(
			Arrays

Changed file in package eu.dnetlib.dhp.actionmanager (name not shown in this view)

@@ -3,8 +3,6 @@ package eu.dnetlib.dhp.actionmanager;
import java.util.Optional;
-import eu.dnetlib.dhp.schema.oaf.Instance;
-import eu.dnetlib.dhp.schema.oaf.Qualifier;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;

@@ -15,6 +13,8 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.dhp.application.ArgumentApplicationParser;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.oaf.Instance;
+import eu.dnetlib.dhp.schema.oaf.Qualifier;
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
import eu.dnetlib.dhp.schema.oaf.Subject;
import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;

Changed file: GenerateRAiDActionSetJob.java

@@ -1,15 +1,15 @@
package eu.dnetlib.dhp.actionmanager.raid;

-import com.fasterxml.jackson.databind.ObjectMapper;
-import eu.dnetlib.dhp.actionmanager.raid.model.RAiDEntity;
-import eu.dnetlib.dhp.application.ArgumentApplicationParser;
-import eu.dnetlib.dhp.common.Constants;
-import eu.dnetlib.dhp.common.HdfsSupport;
-import eu.dnetlib.dhp.schema.action.AtomicAction;
-import eu.dnetlib.dhp.schema.common.ModelConstants;
-import eu.dnetlib.dhp.schema.oaf.*;
-import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
-import eu.dnetlib.dhp.utils.DHPUtils;
+import static eu.dnetlib.dhp.actionmanager.personentity.ExtractPerson.OPENAIRE_DATASOURCE_ID;
+import static eu.dnetlib.dhp.actionmanager.personentity.ExtractPerson.OPENAIRE_DATASOURCE_NAME;
+import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
+import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
+import static eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils.*;
+
+import java.util.*;
+import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;

@@ -19,172 +19,191 @@ import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import eu.dnetlib.dhp.actionmanager.raid.model.RAiDEntity;
+import eu.dnetlib.dhp.application.ArgumentApplicationParser;
+import eu.dnetlib.dhp.common.Constants;
+import eu.dnetlib.dhp.common.HdfsSupport;
+import eu.dnetlib.dhp.schema.action.AtomicAction;
+import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.oaf.*;
+import eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils;
+import eu.dnetlib.dhp.utils.DHPUtils;
import scala.Tuple2;
-import java.util.*;
-import java.util.stream.Collectors;
-import static eu.dnetlib.dhp.actionmanager.personentity.ExtractPerson.OPENAIRE_DATASOURCE_ID;
-import static eu.dnetlib.dhp.actionmanager.personentity.ExtractPerson.OPENAIRE_DATASOURCE_NAME;
-import static eu.dnetlib.dhp.common.SparkSessionSupport.runWithSparkSession;
-import static eu.dnetlib.dhp.schema.common.ModelConstants.*;
-import static eu.dnetlib.dhp.schema.oaf.utils.OafMapperUtils.*;

public class GenerateRAiDActionSetJob {

	private static final Logger log = LoggerFactory
		.getLogger(eu.dnetlib.dhp.actionmanager.raid.GenerateRAiDActionSetJob.class);

	private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

	private static final List<KeyValue> RAID_COLLECTED_FROM = listKeyValues(
		OPENAIRE_DATASOURCE_ID, OPENAIRE_DATASOURCE_NAME);

-	private static final Qualifier RAID_QUALIFIER = qualifier("raid:openaireinference", "raid:openaireinference", DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS);
-
-	private static final DataInfo RAID_DATA_INFO = dataInfo(
-		false, OPENAIRE_DATASOURCE_NAME, true, false, RAID_QUALIFIER, "0.92");
+	private static final Qualifier RAID_QUALIFIER = qualifier("0049", "Research Activity Identifier", DNET_PUBLICATION_RESOURCE, DNET_PUBLICATION_RESOURCE);
+
+	private static final Qualifier RAID_INFERENCE_QUALIFIER = qualifier(
+		"raid:openaireinference", "Inferred by OpenAIRE", DNET_PROVENANCE_ACTIONS, DNET_PROVENANCE_ACTIONS);
+
+	private static final DataInfo RAID_DATA_INFO = dataInfo(
+		false, OPENAIRE_DATASOURCE_NAME, true, false, RAID_INFERENCE_QUALIFIER, "0.92");

	public static void main(final String[] args) throws Exception {

		final String jsonConfiguration = IOUtils
			.toString(
				eu.dnetlib.dhp.actionmanager.raid.GenerateRAiDActionSetJob.class
					.getResourceAsStream("/eu/dnetlib/dhp/actionmanager/raid/action_set_parameters.json"));

		final ArgumentApplicationParser parser = new ArgumentApplicationParser(jsonConfiguration);

		parser.parseArgument(args);

		final Boolean isSparkSessionManaged = Optional
			.ofNullable(parser.get("isSparkSessionManaged"))
			.map(Boolean::valueOf)
			.orElse(Boolean.TRUE);

		log.info("isSparkSessionManaged: {}", isSparkSessionManaged);

		final String inputPath = parser.get("inputPath");
		log.info("inputPath: {}", inputPath);

		final String outputPath = parser.get("outputPath");
		log.info("outputPath {}: ", outputPath);

		final SparkConf conf = new SparkConf();

		runWithSparkSession(conf, isSparkSessionManaged, spark -> {
			removeOutputDir(spark, outputPath);
			processRAiDEntities(spark, inputPath, outputPath);
		});
	}

	private static void removeOutputDir(final SparkSession spark, final String path) {
		HdfsSupport.remove(path, spark.sparkContext().hadoopConfiguration());
	}

	static void processRAiDEntities(final SparkSession spark,
		final String inputPath,
		final String outputPath) {
		readInputPath(spark, inputPath)
			.map(GenerateRAiDActionSetJob::prepareRAiD)
			.flatMap(List::iterator)
			.mapToPair(
				aa -> new Tuple2<>(new Text(aa.getClazz().getCanonicalName()),
					new Text(OBJECT_MAPPER.writeValueAsString(aa))))
			.saveAsHadoopFile(outputPath, Text.class, Text.class, SequenceFileOutputFormat.class);
	}

	protected static List<AtomicAction<? extends Oaf>> prepareRAiD(final RAiDEntity r) {

		final Date now = new Date();
		final OtherResearchProduct orp = new OtherResearchProduct();
		final List<AtomicAction<? extends Oaf>> res = new ArrayList<>();
		String raidId = calculateOpenaireId(r.getRaid());

		orp.setId(raidId);
		orp.setCollectedfrom(RAID_COLLECTED_FROM);
		orp.setDataInfo(RAID_DATA_INFO);
-		orp.setResourcetype(RAID_QUALIFIER);
		orp
			.setTitle(
				Collections
					.singletonList(
						structuredProperty(
							r.getTitle(),
							qualifier("main title", "main title", DNET_DATACITE_TITLE, DNET_DATACITE_TITLE),
							RAID_DATA_INFO)));
		orp.setDescription(listFields(RAID_DATA_INFO, r.getSummary()));
-		orp.setAuthor(createAuthors(r.getAuthors()));
+		// orp.setAuthor(createAuthors(r.getAuthors()));
		orp.setInstance(Collections.singletonList(eu.dnetlib.dhp.actionmanager.Constants.getInstance(RAID_QUALIFIER)));
		orp
			.setSubject(
				r
					.getSubjects()
					.stream()
					.map(
						s -> subject(
							s,
							qualifier(
								DNET_SUBJECT_KEYWORD, DNET_SUBJECT_KEYWORD, DNET_SUBJECT_TYPOLOGIES,
								DNET_SUBJECT_TYPOLOGIES),
							RAID_DATA_INFO))
					.collect(Collectors.toList()));
		orp
			.setRelevantdate(
				Arrays
					.asList(
						structuredProperty(
							r.getEndDate(), qualifier("endDate", "endDate", DNET_DATACITE_DATE, DNET_DATACITE_DATE),
							RAID_DATA_INFO),
						structuredProperty(
							r.getStartDate(),
							qualifier("startDate", "startDate", DNET_DATACITE_DATE, DNET_DATACITE_DATE),
							RAID_DATA_INFO)));
		orp.setLastupdatetimestamp(now.getTime());
-		orp.setDateofcollection(r.getStartDate());
+		orp.setDateofacceptance(field(r.getStartDate(), RAID_DATA_INFO));

		res.add(new AtomicAction<>(OtherResearchProduct.class, orp));

		for (String resultId : r.getIds()) {
			Relation rel1 = OafMapperUtils
				.getRelation(
					raidId,
					resultId,
					ModelConstants.RESULT_RESULT,
-					ModelConstants.OUTCOME,
-					PART,
+					PART,
+					HAS_PART,
					RAID_COLLECTED_FROM,
					RAID_DATA_INFO,
					now.getTime(),
					null,
					null);
			Relation rel2 = OafMapperUtils
				.getRelation(
					resultId,
					raidId,
					ModelConstants.RESULT_RESULT,
-					ModelConstants.OUTCOME,
+					PART,
					IS_PART_OF,
					RAID_COLLECTED_FROM,
					RAID_DATA_INFO,
					now.getTime(),
					null,
					null);
			res.add(new AtomicAction<>(Relation.class, rel1));
			res.add(new AtomicAction<>(Relation.class, rel2));
		}

		return res;
	}

	public static String calculateOpenaireId(final String raid) {
		return String.format("50|%s::%s", Constants.RAID_NS_PREFIX, DHPUtils.md5(raid));
	}

	public static List<Author> createAuthors(final List<String> author) {
		return author.stream().map(s -> {
			Author a = new Author();
			a.setFullname(s);
			return a;
		}).collect(Collectors.toList());
	}

	private static JavaRDD<RAiDEntity> readInputPath(
		final SparkSession spark,
		final String path) {

		return spark
			.read()
			.json(path)
			.as(Encoders.bean(RAiDEntity.class))
			.toJavaRDD();
	}
}
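
For context, calculateOpenaireId above produces identifiers of the shape asserted in the test further down ("50|raid________::<md5>"). A minimal sketch that reproduces the scheme with plain JDK hashing (not part of the commit): it assumes DHPUtils.md5 is a lower-case hex MD5, and the "raid________" namespace prefix value is taken from the test expectation rather than read from Constants.RAID_NS_PREFIX.

import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

public class RaidIdExample {

	// stand-in for DHPUtils.md5: lower-case hex MD5 of the input string (assumption)
	static String md5(String s) throws Exception {
		MessageDigest md = MessageDigest.getInstance("MD5");
		byte[] digest = md.digest(s.getBytes(StandardCharsets.UTF_8));
		return String.format("%032x", new BigInteger(1, digest));
	}

	public static void main(String[] args) throws Exception {
		String raid = "-92190526"; // sample RAiD value used in the test below
		String openaireId = String.format("50|%s::%s", "raid________", md5(raid));
		System.out.println(openaireId); // prints 50|raid________::<32-char md5>
	}
}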

Changed file: GenerateRAiDActionSetJob.java (package eu.dnetlib.dhp.actionmanager.raid.model)

@@ -1,2 +1,5 @@
-package eu.dnetlib.dhp.actionmanager.raid.model;public class GenerateRAiDActionSetJob {
+package eu.dnetlib.dhp.actionmanager.raid.model;
+
+public class GenerateRAiDActionSetJob {
}

Changed file: RAiDEntity.java

@@ -1,3 +1,4 @@
package eu.dnetlib.dhp.actionmanager.raid.model;

import java.io.Serializable;

@@ -5,98 +6,101 @@ import java.util.List;

public class RAiDEntity implements Serializable {

	String raid;
	List<String> authors;
	String startDate;
	String endDate;
	List<String> subjects;
	List<String> titles;
	List<String> ids;
	String title;
	String summary;

	public RAiDEntity() {
	}

	public RAiDEntity(String raid, List<String> authors, String startDate, String endDate, List<String> subjects,
		List<String> titles, List<String> ids, String title, String summary) {
		this.raid = raid;
		this.authors = authors;
		this.startDate = startDate;
		this.endDate = endDate;
		this.subjects = subjects;
		this.titles = titles;
		this.ids = ids;
		this.title = title;
		this.summary = summary;
	}

	public String getRaid() {
		return raid;
	}

	public void setRaid(String raid) {
		this.raid = raid;
	}

	public List<String> getAuthors() {
		return authors;
	}

	public void setAuthors(List<String> authors) {
		this.authors = authors;
	}

	public String getStartDate() {
		return startDate;
	}

	public void setStartDate(String startDate) {
		this.startDate = startDate;
	}

	public String getEndDate() {
		return endDate;
	}

	public void setEndDate(String endDate) {
		this.endDate = endDate;
	}

	public List<String> getSubjects() {
		return subjects;
	}

	public void setSubjects(List<String> subjects) {
		this.subjects = subjects;
	}

	public List<String> getTitles() {
		return titles;
	}

	public void setTitles(List<String> titles) {
		this.titles = titles;
	}

	public List<String> getIds() {
		return ids;
	}

	public void setIds(List<String> ids) {
		this.ids = ids;
	}

	public String getTitle() {
		return title;
	}

	public void setTitle(String title) {
		this.title = title;
	}

	public String getSummary() {
		return summary;
	}

	public void setSummary(String summary) {
		this.summary = summary;
	}
}

Changed file (name not shown in this view)

@@ -21,7 +21,6 @@
import java.util.Set;
import java.util.stream.Collectors;
-import eu.dnetlib.dhp.schema.oaf.*;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;

@@ -45,6 +44,7 @@ import eu.dnetlib.dhp.common.Constants;
import eu.dnetlib.dhp.common.HdfsSupport;
import eu.dnetlib.dhp.schema.action.AtomicAction;
import eu.dnetlib.dhp.schema.common.ModelConstants;
+import eu.dnetlib.dhp.schema.oaf.*;
import eu.dnetlib.dhp.utils.DHPUtils;
import scala.Tuple2;

Changed file: PMAffiliation.java

@@ -1,3 +1,4 @@
package eu.dnetlib.dhp.sx.bio.pubmed;

/**

@@ -7,32 +8,33 @@
 */
public class PMAffiliation {

	private String name;

	private PMIdentifier identifier;

	public PMAffiliation() {

	}

	public PMAffiliation(String name, PMIdentifier identifier) {
		this.name = name;
		this.identifier = identifier;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public PMIdentifier getIdentifier() {
		return identifier;
	}

	public PMAffiliation setIdentifier(PMIdentifier identifier) {
		this.identifier = identifier;
		return this;
	}
}

Changed file: PMAuthor.java

@@ -97,5 +97,4 @@ public class PMAuthor implements Serializable {
		this.affiliation = affiliation;
	}

}

Changed file: PMIdentifier.java

@@ -1,53 +1,53 @@
package eu.dnetlib.dhp.sx.bio.pubmed;

public class PMIdentifier {

	private String pid;
	private String type;

	public PMIdentifier(String pid, String type) {
		this.pid = cleanPid(pid);
		this.type = type;
	}

	public PMIdentifier() {

	}

	private String cleanPid(String pid) {

		if (pid == null) {
			return null;
		}

		// clean ORCID ID in the form 0000000163025705 to 0000-0001-6302-5705
		if (pid.matches("[0-9]{15}[0-9X]")) {
			return pid.replaceAll("(.{4})(.{4})(.{4})(.{4})", "$1-$2-$3-$4");
		}

		// clean ORCID in the form http://orcid.org/0000-0001-8567-3543 to 0000-0001-8567-3543
		if (pid.matches("http://orcid.org/[0-9]{4}-[0-9]{4}-[0-9]{4}-[0-9]{4}")) {
			return pid.replaceAll("http://orcid.org/", "");
		}
		return pid;
	}

	public String getPid() {
		return pid;
	}

	public PMIdentifier setPid(String pid) {
		this.pid = cleanPid(pid);
		return this;
	}

	public String getType() {
		return type;
	}

	public PMIdentifier setType(String type) {
		this.type = type;
		return this;
	}
}
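
A small standalone sketch of the two cleanPid normalisations above (not part of the commit), using the same regular expressions so they can be tried in isolation; the sample values mirror the ones appearing in BioScholixTest further down.

public class OrcidCleaningExample {

	// same rules as PMIdentifier.cleanPid: compact 16-character ORCIDs get dashes,
	// http://orcid.org/ prefixes are stripped, anything else is returned unchanged
	static String cleanPid(String pid) {
		if (pid == null) {
			return null;
		}
		if (pid.matches("[0-9]{15}[0-9X]")) {
			return pid.replaceAll("(.{4})(.{4})(.{4})(.{4})", "$1-$2-$3-$4");
		}
		if (pid.matches("http://orcid.org/[0-9]{4}-[0-9]{4}-[0-9]{4}-[0-9]{4}")) {
			return pid.replaceAll("http://orcid.org/", "");
		}
		return pid;
	}

	public static void main(String[] args) {
		System.out.println(cleanPid("0000000163025705")); // 0000-0001-6302-5705
		System.out.println(cleanPid("http://orcid.org/0000-0001-8567-3543")); // 0000-0001-8567-3543
		System.out.println(cleanPid("0000-0001-9189-1440")); // already clean, unchanged
	}
}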

Changed file: Oozie workflow for the RAiD action set

@@ -20,8 +20,6 @@
			<fs>
				<delete path='${raidActionSetPath}'/>
				<mkdir path='${raidActionSetPath}'/>
-				<delete path='${workingDir}'/>
-				<mkdir path='${workingDir}'/>
			</fs>
			<ok to="processRAiDFile"/>
			<error to="Kill"/>

Changed file: Crossref2Oaf.scala

@@ -673,7 +673,6 @@ case object Crossref2Oaf {
    val doi = input.getString(0)
    val rorId = input.getString(1)

    val pubId = IdentifierFactory.idFromPid("50", "doi", DoiCleaningRule.clean(doi), true)
    val affId = GenerateRorActionSetJob.calculateOpenaireId(rorId)

Changed file: PMParser2.scala

@@ -82,21 +82,22 @@ class PMParser2 {
    a.setLastName((author \ "LastName").text)
    a.setForeName((author \ "ForeName").text)
    val id = (author \ "Identifier").text
    val idType = (author \ "Identifier" \ "@Source").text
    if (id != null && id.nonEmpty && idType != null && idType.nonEmpty) {
      a.setIdentifier(new PMIdentifier(id, idType))
    }

    val affiliation = (author \ "AffiliationInfo" \ "Affiliation").text
    val affiliationId = (author \ "AffiliationInfo" \ "Identifier").text
    val affiliationIdType = (author \ "AffiliationInfo" \ "Identifier" \ "@Source").text
    if (affiliation != null && affiliation.nonEmpty) {
      val aff = new PMAffiliation()
      aff.setName(affiliation)
      if (
        affiliationId != null && affiliationId.nonEmpty && affiliationIdType != null && affiliationIdType.nonEmpty
      ) {
        aff.setIdentifier(new PMIdentifier(affiliationId, affiliationIdType))
      }
      a.setAffiliation(aff)

Changed file: PubMedToOaf.scala

@@ -294,11 +294,23 @@ object PubMedToOaf {
      author.setName(a.getForeName)
      author.setSurname(a.getLastName)
      author.setFullname(a.getFullName)
      if (a.getIdentifier != null) {
        author.setPid(
          List(
            OafMapperUtils.structuredProperty(
              a.getIdentifier.getPid,
              OafMapperUtils.qualifier(
                a.getIdentifier.getType,
                a.getIdentifier.getType,
                ModelConstants.DNET_PID_TYPES,
                ModelConstants.DNET_PID_TYPES
              ),
              dataInfo
            )
          ).asJava
        )
      }
      if (a.getAffiliation != null)
        author.setRawAffiliationString(List(a.getAffiliation.getName).asJava)
      author.setRank(index + 1)
      author

Changed file: GenerateRAiDActionSetJobTest.java

@@ -1,11 +1,16 @@
package eu.dnetlib.dhp.actionmanager.raid;

-import eu.dnetlib.dhp.actionmanager.opencitations.CreateOpenCitationsASTest;
-import eu.dnetlib.dhp.actionmanager.raid.model.RAiDEntity;
-import eu.dnetlib.dhp.schema.action.AtomicAction;
-import eu.dnetlib.dhp.schema.oaf.Oaf;
-import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
-import eu.dnetlib.dhp.schema.oaf.Relation;
+import static java.nio.file.Files.createTempDirectory;
+
+import static eu.dnetlib.dhp.actionmanager.Constants.OBJECT_MAPPER;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import java.io.File;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;

@@ -20,93 +25,141 @@ import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
+import eu.dnetlib.dhp.actionmanager.opencitations.CreateOpenCitationsASTest;
+import eu.dnetlib.dhp.actionmanager.raid.model.RAiDEntity;
+import eu.dnetlib.dhp.schema.action.AtomicAction;
+import eu.dnetlib.dhp.schema.oaf.Oaf;
+import eu.dnetlib.dhp.schema.oaf.OtherResearchProduct;
+import eu.dnetlib.dhp.schema.oaf.Relation;
import scala.Tuple2;
-import java.io.File;
-import java.nio.file.Paths;
-import java.util.Arrays;
-import java.util.List;
-import static eu.dnetlib.dhp.actionmanager.Constants.OBJECT_MAPPER;
-import static java.nio.file.Files.createTempDirectory;
-import static org.junit.jupiter.api.Assertions.assertEquals;

public class GenerateRAiDActionSetJobTest {
	private static String input_path;

	private static String output_path;

	static SparkSession spark;

	@BeforeEach
	void setUp() throws Exception {

		input_path = Paths
			.get(
				GenerateRAiDActionSetJobTest.class
					.getResource("/eu/dnetlib/dhp/actionmanager/raid/raid_example.json")
					.toURI())
			.toFile()
			.getAbsolutePath();

		output_path = createTempDirectory(GenerateRAiDActionSetJobTest.class.getSimpleName() + "-")
			.toAbsolutePath()
			.toString();

		SparkConf conf = new SparkConf();
		conf.setAppName(GenerateRAiDActionSetJobTest.class.getSimpleName());

		conf.setMaster("local[*]");
		conf.set("spark.driver.host", "localhost");
		conf.set("hive.metastore.local", "true");
		conf.set("spark.ui.enabled", "false");
		conf.set("spark.sql.warehouse.dir", output_path);
		conf.set("hive.metastore.warehouse.dir", output_path);

		spark = SparkSession
			.builder()
			.appName(GenerateRAiDActionSetJobTest.class.getSimpleName())
			.config(conf)
			.getOrCreate();
	}

	@AfterAll
	static void cleanUp() throws Exception {
		FileUtils.deleteDirectory(new File(output_path));
	}

	@Test
	@Disabled
	void testProcessRAiDEntities() {
		GenerateRAiDActionSetJob.processRAiDEntities(spark, input_path, output_path + "/test_raid_action_set");

		JavaSparkContext sc = JavaSparkContext.fromSparkContext(spark.sparkContext());

		JavaRDD<? extends Oaf> result = sc
			.sequenceFile(output_path + "/test_raid_action_set", Text.class, Text.class)
			.map(value -> OBJECT_MAPPER.readValue(value._2().toString(), AtomicAction.class))
			.map(AtomicAction::getPayload);

		assertEquals(80, result.count());
	}

	@Test
	void testPrepareRAiD() {

		List<AtomicAction<? extends Oaf>> atomicActions = GenerateRAiDActionSetJob
			.prepareRAiD(
				new RAiDEntity(
					"-92190526",
					Arrays
						.asList(
							"Berli, Justin", "Le Mao, Bérénice", "Guillaume Touya", "Wenclik, Laura",
							"Courtial, Azelle", "Muehlenhaus, Ian", "Justin Berli", "Touya, Guillaume",
							"Gruget, Maïeul", "Azelle Courtial", "Ian Muhlenhaus", "Maïeul Gruget", "Marion Dumont",
							"Maïeul GRUGET", "Cécile Duchêne"),
					"2021-09-10",
					"2024-02-16",
					Arrays
						.asList(
							"cartography, zoom, pan, desert fog", "Road network", "zooming", "Pan-scalar maps",
							"pan-scalar map", "Python library", "QGIS", "map design", "landmarks",
							"Cartes transscalaires", "anchor", "disorientation", "[INFO]Computer Science [cs]",
							"[SHS.GEO]Humanities and Social Sciences/Geography", "cognitive cartography",
							"eye-tracking", "Computers in Earth Sciences", "Topographic map", "National Mapping Agency",
							"General Medicine", "Geography, Planning and Development", "multi-scales",
							"pan-scalar maps", "Selection", "cartography", "General Earth and Planetary Sciences",
							"progressiveness", "map generalisation", "Eye-tracker", "zoom", "algorithms", "Map Design",
							"cartography, map generalisation, zoom, multi-scale map", "Interactive maps",
							"Map generalisation", "Earth and Planetary Sciences (miscellaneous)",
							"Cartographic generalization", "rivers", "Benchmark", "General Environmental Science",
							"open source", "drawing", "Constraint", "Multi-scale maps"),
					Arrays
						.asList(
							"Where do people look at during multi-scale map tasks?", "FogDetector survey raw data",
							"Collection of cartographic disorientation stories", "Anchorwhat dataset",
							"BasqueRoads: A Benchmark for Road Network Selection",
							"Progressive river network selection for pan-scalar maps",
							"BasqueRoads, a dataset to benchmark road selection algorithms",
							"Missing the city for buildings? A critical review of pan-scalar map generalization and design in contemporary zoomable maps",
							"Empirical approach to advance the generalisation of multi-scale maps",
							"L'Alpe d'Huez: a dataset to benchmark topographic map generalisation",
							"eye-tracking data from a survey on zooming in a pan-scalar map",
							"Material of the experiment 'More is Less' from the MapMuxing project",
							"Cartagen4py, an open source Python library for map generalisation",
							"LAlpe dHuez: A Benchmark for Topographic Map Generalisation"),
					Arrays
						.asList(
							"50|doi_dedup___::6915135e0aa39f913394513f809ae58a",
							"50|doi_dedup___::754e3c283639bc6e104c925ff3e34007",
							"50|doi_dedup___::13517477f3c1261d57a3364363ce6ce0",
							"50|doi_dedup___::675b16c73accc4e7242bbb4ed9b3724a",
							"50|doi_dedup___::94ce09906b2d7d37eb2206cea8a50153",
							"50|dedup_wf_002::cc575d5ca5651ff8c3029a3a76e7e70a",
							"50|doi_dedup___::c5e52baddda17c755d1bae012a97dc13",
							"50|doi_dedup___::4f5f38c9e08fe995f7278963183f8ad4",
							"50|doi_dedup___::a9bc4453273b2d02648a5cb453195042",
							"50|doi_dedup___::5e893dc0cb7624a33f41c9b428bd59f7",
							"50|doi_dedup___::c1ecdef48fd9be811a291deed950e1c5",
							"50|doi_dedup___::9e93c8f2d97c35de8a6a57a5b53ef283",
							"50|dedup_wf_002::d08be0ed27b13d8a880e891e08d093ea",
							"50|doi_dedup___::f8d8b3b9eddeca2fc0e3bc9e63996555"),
					"Exploring Multi-Scale Map Generalization and Design",
					"This project aims to advance the generalization of multi-scale maps by investigating the impact of different design elements on user experience. The research involves collecting and analyzing data from various sources, including surveys, eye-tracking studies, and user experiments. The goal is to identify best practices for map generalization and design, with a focus on reducing disorientation and improving information retrieval during exploration. The project has led to the development of several datasets, including BasqueRoads, AnchorWhat, and L'Alpe d'Huez, which can be used to benchmark road selection algorithms and topographic map generalization techniques. The research has also resulted in the creation of a Python library, Cartagen4py, for map generalization. The findings of this project have the potential to improve the design and usability of multi-scale maps, making them more effective tools for navigation and information retrieval."));

		OtherResearchProduct orp = (OtherResearchProduct) atomicActions.get(0).getPayload();
		Relation rel = (Relation) atomicActions.get(1).getPayload();

		assertEquals("Exploring Multi-Scale Map Generalization and Design", orp.getTitle().get(0).getValue());
		assertEquals("50|raid________::759a564ce5cc7360cab030c517c7366b", rel.getSource());
		assertEquals("50|doi_dedup___::6915135e0aa39f913394513f809ae58a", rel.getTarget());
	}
}

Changed file: BioScholixTest.scala

@@ -63,7 +63,6 @@ class BioScholixTest extends AbstractVocabularyTest {
      "0000000333457333",
      "0000000335964515",
      "0000000302921949",
      "http://orcid.org/0000-0001-8567-3543",
      "http://orcid.org/0000-0001-7868-8528",
      "0000-0001-9189-1440",

Changed file: ResultTagger.java

@@ -130,7 +130,7 @@ public class ResultTagger implements Serializable {
			// log.info("Remove constraints for " + communityId);
			if (conf.getRemoveConstraintsMap().keySet().contains(communityId) &&
				conf.getRemoveConstraintsMap().get(communityId).getCriteria() != null &&
				!conf.getRemoveConstraintsMap().get(communityId).getCriteria().isEmpty() &&
				conf
					.getRemoveConstraintsMap()
					.get(communityId)

@@ -228,7 +228,7 @@ public class ResultTagger implements Serializable {
				.forEach(communityId -> {
					if (!removeCommunities.contains(communityId) &&
						conf.getSelectionConstraintsMap().get(communityId).getCriteria() != null &&
						!conf.getSelectionConstraintsMap().get(communityId).getCriteria().isEmpty() &&
						conf
							.getSelectionConstraintsMap()
							.get(communityId)

Changed file: MappersTest.java

@@ -915,7 +915,8 @@ class MappersTest {

	@Test
	void testODFRecord_guidelines4() throws IOException {
-		final String xml = IOUtils.toString(Objects.requireNonNull(getClass().getResourceAsStream("odf_guidelines4.xml")));
+		final String xml = IOUtils
+			.toString(Objects.requireNonNull(getClass().getResourceAsStream("odf_guidelines4.xml")));
		final List<Oaf> list = new OdfToOafMapper(vocs, false, true).processMdRecord(xml);
		final Publication p = (Publication) list.get(0);

Changed file: ProvisionModelSupport.java

@@ -5,7 +5,6 @@ import java.io.StringReader;
import java.util.*;
import java.util.stream.Collectors;
-import eu.dnetlib.dhp.schema.solr.PersonTopic;
import org.apache.commons.lang3.StringUtils;
import org.dom4j.Document;
import org.dom4j.DocumentException;

@@ -40,6 +39,7 @@ import eu.dnetlib.dhp.schema.solr.OpenAccessColor;
import eu.dnetlib.dhp.schema.solr.OpenAccessRoute;
import eu.dnetlib.dhp.schema.solr.Organization;
import eu.dnetlib.dhp.schema.solr.Person;
+import eu.dnetlib.dhp.schema.solr.PersonTopic;
import eu.dnetlib.dhp.schema.solr.Pid;
import eu.dnetlib.dhp.schema.solr.Project;
import eu.dnetlib.dhp.schema.solr.Result;

@@ -216,11 +216,14 @@ public class ProvisionModelSupport {
	}

	private static List<PersonTopic> mapPersonTopics(List<eu.dnetlib.dhp.schema.oaf.PersonTopic> subjects) {
		return Optional
			.ofNullable(subjects)
			.map(
				ss -> ss
					.stream()
					.map(ProvisionModelSupport::mapPersonTopic)
					.collect(Collectors.toList()))
			.orElse(null);
	}

	private static PersonTopic mapPersonTopic(eu.dnetlib.dhp.schema.oaf.PersonTopic pt) {
private static PersonTopic mapPersonTopic(eu.dnetlib.dhp.schema.oaf.PersonTopic pt) { private static PersonTopic mapPersonTopic(eu.dnetlib.dhp.schema.oaf.PersonTopic pt) {