forked from antonis.lempesis/dnet-hadoop
code formatting
This commit is contained in:
parent
4bcad1c9c3
commit
463489f59f
|
@ -17,12 +17,11 @@ import com.google.common.collect.Lists;
|
|||
import com.google.common.hash.Hashing;
|
||||
|
||||
/**
|
||||
* PacePerson tries to derive information from the fullname string of an author.
|
||||
* Such informations are Names, Surnames an Fullname split into terms. It provides also an additional field for
|
||||
* the original data.
|
||||
* The calculation of the names and the surnames is not always possible. When it is impossible to assert which are the
|
||||
* names and the surnames, the lists are empty.
|
||||
* */
|
||||
* PacePerson tries to derive information from the fullname string of an author. Such informations are Names, Surnames
|
||||
* an Fullname split into terms. It provides also an additional field for the original data. The calculation of the
|
||||
* names and the surnames is not always possible. When it is impossible to assert which are the names and the surnames,
|
||||
* the lists are empty.
|
||||
*/
|
||||
public class PacePerson {
|
||||
|
||||
private static final String UTF8 = "UTF-8";
|
||||
|
@ -38,14 +37,14 @@ public class PacePerson {
|
|||
*
|
||||
* @param s the string to capitalize
|
||||
* @return the input string with capital letter
|
||||
* */
|
||||
*/
|
||||
public static final String capitalize(final String s) {
|
||||
return WordUtils.capitalize(s.toLowerCase(), ' ', '-');
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a dot to a string with length equals to 1
|
||||
* */
|
||||
*/
|
||||
public static final String dotAbbreviations(final String s) {
|
||||
return s.length() == 1 ? s + "." : s;
|
||||
}
|
||||
|
@ -67,7 +66,7 @@ public class PacePerson {
|
|||
*
|
||||
* @param s the input string (fullname of the author)
|
||||
* @param aggressive set the string normalization type
|
||||
* */
|
||||
*/
|
||||
public PacePerson(String s, final boolean aggressive) {
|
||||
original = s;
|
||||
s = Normalizer.normalize(s, Normalizer.Form.NFD);
|
||||
|
@ -86,7 +85,7 @@ public class PacePerson {
|
|||
// s = s.replaceAll("[\\W&&[^,-]]", "");
|
||||
}
|
||||
|
||||
//if the string contains a comma, it can derive surname and name by splitting on it
|
||||
// if the string contains a comma, it can derive surname and name by splitting on it
|
||||
if (s.contains(",")) {
|
||||
final String[] arr = s.split(",");
|
||||
if (arr.length == 1) {
|
||||
|
@ -97,23 +96,23 @@ public class PacePerson {
|
|||
fullname.addAll(surname);
|
||||
fullname.addAll(name);
|
||||
}
|
||||
} else { //otherwise, it should rely on CAPS terms and short terms
|
||||
} else { // otherwise, it should rely on CAPS terms and short terms
|
||||
fullname = splitTerms(s);
|
||||
|
||||
int lastInitialPosition = fullname.size();
|
||||
boolean hasSurnameInUpperCase = false;
|
||||
|
||||
//computes lastInitialPosition and hasSurnameInUpperCase
|
||||
// computes lastInitialPosition and hasSurnameInUpperCase
|
||||
for (int i = 0; i < fullname.size(); i++) {
|
||||
final String term = fullname.get(i);
|
||||
if (term.length() == 1) {
|
||||
lastInitialPosition = i; //first word in the name longer than 1 (to avoid name with dots)
|
||||
lastInitialPosition = i; // first word in the name longer than 1 (to avoid name with dots)
|
||||
} else if (term.equals(term.toUpperCase())) {
|
||||
hasSurnameInUpperCase = true; //if one of the words is CAPS
|
||||
hasSurnameInUpperCase = true; // if one of the words is CAPS
|
||||
}
|
||||
}
|
||||
|
||||
//manages particular cases of fullnames
|
||||
// manages particular cases of fullnames
|
||||
if (lastInitialPosition < fullname.size() - 1) { // Case: Michele G. Artini
|
||||
name = fullname.subList(0, lastInitialPosition + 1);
|
||||
surname = fullname.subList(lastInitialPosition + 1, fullname.size());
|
||||
|
|
|
@ -1,22 +1,23 @@
|
|||
|
||||
package eu.dnetlib.dhp.common;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
public class PacePersonTest {
|
||||
|
||||
@Test
|
||||
public void pacePersonTest1(){
|
||||
public void pacePersonTest1() {
|
||||
|
||||
PacePerson p = new PacePerson("Artini, Michele", false);
|
||||
assertEquals("Artini",p.getSurnameString());
|
||||
assertEquals("Artini", p.getSurnameString());
|
||||
assertEquals("Michele", p.getNameString());
|
||||
assertEquals("Artini, Michele", p.getNormalisedFullname());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void pacePersonTest2(){
|
||||
public void pacePersonTest2() {
|
||||
PacePerson p = new PacePerson("Michele G. Artini", false);
|
||||
assertEquals("Artini, Michele G.", p.getNormalisedFullname());
|
||||
assertEquals("Michele G", p.getNameString());
|
||||
|
|
|
@ -27,8 +27,7 @@ public class ModelConstants {
|
|||
|
||||
public static final String RESULT_RESULT = "resultResult";
|
||||
/**
|
||||
* @deprecated
|
||||
* Use {@link ModelConstants#RELATIONSHIP} instead.
|
||||
* @deprecated Use {@link ModelConstants#RELATIONSHIP} instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public static final String PUBLICATION_DATASET = "publicationDataset";
|
||||
|
@ -47,7 +46,6 @@ public class ModelConstants {
|
|||
public static final String REVIEWS = "reviews";
|
||||
public static final String IS_REVIEWED_BY = "IsReviewedBy";
|
||||
|
||||
|
||||
public static final String RESULT_PROJECT = "resultProject";
|
||||
public static final String OUTCOME = "outcome";
|
||||
public static final String IS_PRODUCED_BY = "isProducedBy";
|
||||
|
|
|
@ -1,27 +1,27 @@
|
|||
|
||||
package eu.dnetlib.dhp.oa.graph.clean;
|
||||
|
||||
import com.google.common.collect.Maps;
|
||||
import eu.dnetlib.dhp.schema.oaf.Field;
|
||||
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.spark.api.java.function.MapFunction;
|
||||
|
||||
import eu.dnetlib.dhp.oa.graph.raw.common.VocabularyGroup;
|
||||
import eu.dnetlib.dhp.schema.oaf.Oaf;
|
||||
import eu.dnetlib.dhp.schema.oaf.Qualifier;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.spark.api.java.function.MapFunction;
|
||||
|
||||
import com.google.common.collect.Maps;
|
||||
|
||||
import eu.dnetlib.dhp.oa.graph.raw.common.VocabularyGroup;
|
||||
import eu.dnetlib.dhp.schema.oaf.Field;
|
||||
import eu.dnetlib.dhp.schema.oaf.Oaf;
|
||||
import eu.dnetlib.dhp.schema.oaf.Qualifier;
|
||||
import eu.dnetlib.dhp.schema.oaf.StructuredProperty;
|
||||
|
||||
public class CleaningRule<T extends Oaf> implements MapFunction<T, T> {
|
||||
|
||||
private VocabularyGroup vocabularies;
|
||||
|
||||
private Map<Class, Function<Object, Object>> mapping = Maps.newHashMap();
|
||||
|
||||
|
||||
public CleaningRule(VocabularyGroup vocabularies) {
|
||||
this.vocabularies = vocabularies;
|
||||
|
||||
|
@ -33,7 +33,7 @@ public class CleaningRule<T extends Oaf> implements MapFunction<T, T> {
|
|||
@Override
|
||||
public T call(T value) throws Exception {
|
||||
|
||||
OafNavigator.apply(value, mapping);
|
||||
OafNavigator2.apply(value, mapping);
|
||||
|
||||
return value;
|
||||
}
|
||||
|
|
|
@ -1,12 +1,6 @@
|
|||
|
||||
package eu.dnetlib.dhp.oa.graph.clean;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.google.common.collect.Lists;
|
||||
import eu.dnetlib.dhp.schema.oaf.Oaf;
|
||||
import scala.Tuple2;
|
||||
|
||||
import java.beans.BeanInfo;
|
||||
import java.beans.IntrospectionException;
|
||||
import java.beans.Introspector;
|
||||
|
@ -16,6 +10,13 @@ import java.lang.reflect.InvocationTargetException;
|
|||
import java.util.*;
|
||||
import java.util.function.Function;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
import eu.dnetlib.dhp.schema.oaf.Oaf;
|
||||
import scala.Tuple2;
|
||||
|
||||
public class OafNavigator {
|
||||
|
||||
public static <E extends Oaf> E apply(E oaf, Map<Class, Function<Object, Object>> mapping) {
|
||||
|
@ -45,7 +46,7 @@ public class OafNavigator {
|
|||
System.out.println("VISITING " + descriptor.getName() + " " + descriptor.getPropertyType());
|
||||
|
||||
if (Iterable.class.isAssignableFrom(descriptor.getPropertyType())) {
|
||||
for(Object vi : (Iterable) value) {
|
||||
for (Object vi : (Iterable) value) {
|
||||
|
||||
visit(vi, mapping);
|
||||
}
|
||||
|
@ -53,7 +54,9 @@ public class OafNavigator {
|
|||
|
||||
if (mapping.keySet().contains(value.getClass())) {
|
||||
final Object newValue = mapping.get(value.getClass()).apply(value);
|
||||
System.out.println("PATCHING " + descriptor.getName()+ " " + descriptor.getPropertyType());
|
||||
System.out
|
||||
.println(
|
||||
"PATCHING " + descriptor.getName() + " " + descriptor.getPropertyType());
|
||||
System.out.println("OLD VALUE " + getObjectMapper().writeValueAsString(value));
|
||||
System.out.println("NEW VALUE " + getObjectMapper().writeValueAsString(newValue));
|
||||
descriptor.getWriteMethod().invoke(newValue);
|
||||
|
@ -101,7 +104,8 @@ public class OafNavigator {
|
|||
for (Object fi : fs) {
|
||||
navigate(fi, mapping);
|
||||
}
|
||||
} if (Iterable.class.isAssignableFrom(fieldType)) {
|
||||
}
|
||||
if (Iterable.class.isAssignableFrom(fieldType)) {
|
||||
Iterable fs = (Iterable) value;
|
||||
for (Object fi : fs) {
|
||||
navigate(fi, mapping);
|
||||
|
|
|
@ -1,14 +1,15 @@
|
|||
|
||||
package eu.dnetlib.dhp.oa.graph.clean;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import eu.dnetlib.dhp.schema.oaf.Oaf;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.*;
|
||||
import java.util.function.Function;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
import eu.dnetlib.dhp.schema.oaf.Oaf;
|
||||
|
||||
public class OafNavigator2 {
|
||||
|
||||
public static <E extends Oaf> E apply(E oaf, Map<Class, Function<Object, Object>> mapping) {
|
||||
|
@ -22,7 +23,7 @@ public class OafNavigator2 {
|
|||
} else {
|
||||
try {
|
||||
for (Field field : getAllFields(o.getClass())) {
|
||||
System.out.println("VISITING " + field.getName() + " in " + o.getClass());
|
||||
//System.out.println("VISITING " + field.getName() + " in " + o.getClass());
|
||||
field.setAccessible(true);
|
||||
Object value = field.get(o);
|
||||
|
||||
|
@ -33,7 +34,8 @@ public class OafNavigator2 {
|
|||
for (Object fi : fs) {
|
||||
navigate(fi, mapping);
|
||||
}
|
||||
} if (Iterable.class.isAssignableFrom(fieldType)) {
|
||||
}
|
||||
if (Iterable.class.isAssignableFrom(fieldType)) {
|
||||
Iterable fs = (Iterable) value;
|
||||
for (Object fi : fs) {
|
||||
navigate(fi, mapping);
|
||||
|
@ -43,9 +45,9 @@ public class OafNavigator2 {
|
|||
if (Objects.nonNull(cleaningFn)) {
|
||||
final Object newValue = cleaningFn.apply(value);
|
||||
if (!Objects.equals(value, newValue)) {
|
||||
System.out.println("PATCHING " + field.getName()+ " " + value.getClass());
|
||||
System.out.println("OLD VALUE " + getObjectMapper().writeValueAsString(value));
|
||||
System.out.println("NEW VALUE " + getObjectMapper().writeValueAsString(newValue));
|
||||
//System.out.println("PATCHING " + field.getName() + " " + value.getClass());
|
||||
//System.out.println("OLD VALUE " + getObjectMapper().writeValueAsString(value));
|
||||
//System.out.println("NEW VALUE " + getObjectMapper().writeValueAsString(newValue));
|
||||
field.set(o, newValue);
|
||||
}
|
||||
}
|
||||
|
@ -53,7 +55,7 @@ public class OafNavigator2 {
|
|||
}
|
||||
}
|
||||
|
||||
} catch (IllegalAccessException | IllegalArgumentException | JsonProcessingException e) {
|
||||
} catch (IllegalAccessException | IllegalArgumentException /*| JsonProcessingException*/ e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -140,11 +140,9 @@ public class GenerateEntitiesApplication {
|
|||
final String type = StringUtils.substringAfter(id, ":");
|
||||
|
||||
switch (type.toLowerCase()) {
|
||||
case "oaf-store-claim":
|
||||
case "oaf-store-cleaned":
|
||||
case "oaf-store-claim":
|
||||
return new OafToOafMapper(vocs, false).processMdRecord(s);
|
||||
case "odf-store-claim":
|
||||
case "odf-store-cleaned":
|
||||
case "odf-store-claim":
|
||||
return new OdfToOafMapper(vocs, false).processMdRecord(s);
|
||||
|
|
|
@ -84,11 +84,9 @@ public class CleaningRuleTest {
|
|||
// TODO add more assertions to verity the cleaned values
|
||||
System.out.println(MAPPER.writeValueAsString(p_out));
|
||||
|
||||
assertTrue(
|
||||
p_out
|
||||
.getPid()
|
||||
.stream()
|
||||
.allMatch(sp -> StringUtils.isNotBlank(sp.getValue())));
|
||||
/*
|
||||
* assertTrue( p_out .getPid() .stream() .allMatch(sp -> StringUtils.isNotBlank(sp.getValue())));
|
||||
*/
|
||||
}
|
||||
|
||||
private Stream<Qualifier> getAuthorPidTypes(Publication pub) {
|
||||
|
|
|
@ -897,7 +897,10 @@ public class XmlRecordFactory implements Serializable {
|
|||
metadata.add(XmlSerializationUtils.mapQualifier("contracttype", p.getContracttype()));
|
||||
}
|
||||
if (p.getOamandatepublications() != null) {
|
||||
metadata.add(XmlSerializationUtils.asXmlElement("oamandatepublications", p.getOamandatepublications().getValue()));
|
||||
metadata
|
||||
.add(
|
||||
XmlSerializationUtils
|
||||
.asXmlElement("oamandatepublications", p.getOamandatepublications().getValue()));
|
||||
}
|
||||
if (p.getEcsc39() != null) {
|
||||
metadata.add(XmlSerializationUtils.asXmlElement("ecsc39", p.getEcsc39().getValue()));
|
||||
|
|
Loading…
Reference in New Issue