New sources formatted by maven plugin

This commit is contained in:
Giambattista Bloisi 2023-07-06 10:28:53 +02:00
parent bd3fcf869a
commit 801da2fd4a
105 changed files with 5610 additions and 5267 deletions

View File

@ -1,9 +1,5 @@
package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.common.AbstractPaceFunctions; package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import org.apache.commons.lang3.StringUtils;
import java.util.Collection; import java.util.Collection;
import java.util.HashSet; import java.util.HashSet;
@ -11,33 +7,41 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
public abstract class AbstractClusteringFunction extends AbstractPaceFunctions implements ClusteringFunction { public abstract class AbstractClusteringFunction extends AbstractPaceFunctions implements ClusteringFunction {
protected Map<String, Integer> params; protected Map<String, Integer> params;
public AbstractClusteringFunction(final Map<String, Integer> params) { public AbstractClusteringFunction(final Map<String, Integer> params) {
this.params = params; this.params = params;
} }
protected abstract Collection<String> doApply(Config conf, String s); protected abstract Collection<String> doApply(Config conf, String s);
@Override @Override
public Collection<String> apply(Config conf, List<Field> fields) { public Collection<String> apply(Config conf, List<Field> fields) {
return fields.stream().filter(f -> !f.isEmpty()) return fields
.map(Field::stringValue) .stream()
.map(this::normalize) .filter(f -> !f.isEmpty())
.map(s -> filterAllStopWords(s)) .map(Field::stringValue)
.map(s -> doApply(conf, s)) .map(this::normalize)
.map(c -> filterBlacklisted(c, ngramBlacklist)) .map(s -> filterAllStopWords(s))
.flatMap(c -> c.stream()) .map(s -> doApply(conf, s))
.filter(StringUtils::isNotBlank) .map(c -> filterBlacklisted(c, ngramBlacklist))
.collect(Collectors.toCollection(HashSet::new)); .flatMap(c -> c.stream())
.filter(StringUtils::isNotBlank)
.collect(Collectors.toCollection(HashSet::new));
} }
public Map<String, Integer> getParams() { public Map<String, Integer> getParams() {
return params; return params;
} }
protected Integer param(String name) { protected Integer param(String name) {
return params.get(name); return params.get(name);
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import java.util.Collection; import java.util.Collection;
@ -6,6 +7,7 @@ import java.util.Set;
import java.util.StringTokenizer; import java.util.StringTokenizer;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
@ClusteringClass("acronyms") @ClusteringClass("acronyms")
@ -19,16 +21,16 @@ public class Acronyms extends AbstractClusteringFunction {
protected Collection<String> doApply(Config conf, String s) { protected Collection<String> doApply(Config conf, String s) {
return extractAcronyms(s, param("max"), param("minLen"), param("maxLen")); return extractAcronyms(s, param("max"), param("minLen"), param("maxLen"));
} }
private Set<String> extractAcronyms(final String s, int maxAcronyms, int minLen, int maxLen) { private Set<String> extractAcronyms(final String s, int maxAcronyms, int minLen, int maxLen) {
final Set<String> acronyms = Sets.newLinkedHashSet(); final Set<String> acronyms = Sets.newLinkedHashSet();
for (int i = 0; i < maxAcronyms; i++) { for (int i = 0; i < maxAcronyms; i++) {
final StringTokenizer st = new StringTokenizer(s); final StringTokenizer st = new StringTokenizer(s);
final StringBuilder sb = new StringBuilder(); final StringBuilder sb = new StringBuilder();
while (st.hasMoreTokens()) { while (st.hasMoreTokens()) {
final String token = st.nextToken(); final String token = st.nextToken();
if (sb.length() > maxLen) { if (sb.length() > maxLen) {

View File

@ -1,11 +1,5 @@
package eu.dnetlib.pace.clustering;
import com.google.common.collect.Maps; package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Document;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldListImpl;
import eu.dnetlib.pace.model.MapDocument;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
@ -13,47 +7,54 @@ import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import com.google.common.collect.Maps;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Document;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldListImpl;
import eu.dnetlib.pace.model.MapDocument;
public class BlacklistAwareClusteringCombiner extends ClusteringCombiner { public class BlacklistAwareClusteringCombiner extends ClusteringCombiner {
public static Collection<String> filterAndCombine(final MapDocument a, final Config conf) { public static Collection<String> filterAndCombine(final MapDocument a, final Config conf) {
Document filtered = filter(a, conf.blacklists()); Document filtered = filter(a, conf.blacklists());
return combine(filtered, conf); return combine(filtered, conf);
} }
private static MapDocument filter(final MapDocument a, final Map<String, List<Pattern>> blacklists) { private static MapDocument filter(final MapDocument a, final Map<String, List<Pattern>> blacklists) {
if (blacklists == null || blacklists.isEmpty()) { if (blacklists == null || blacklists.isEmpty()) {
return a; return a;
} }
final Map<String, Field> filtered = Maps.newHashMap(a.getFieldMap()); final Map<String, Field> filtered = Maps.newHashMap(a.getFieldMap());
for (final Entry<String, List<Pattern>> e : blacklists.entrySet()) { for (final Entry<String, List<Pattern>> e : blacklists.entrySet()) {
Field fields = a.getFieldMap().get(e.getKey()); Field fields = a.getFieldMap().get(e.getKey());
if (fields != null) { if (fields != null) {
final FieldListImpl fl = new FieldListImpl(); final FieldListImpl fl = new FieldListImpl();
for (Field f : fields) { for (Field f : fields) {
if (!isBlackListed(f.stringValue(), e.getValue())) { if (!isBlackListed(f.stringValue(), e.getValue())) {
fl.add(f); fl.add(f);
} }
} }
filtered.put(e.getKey(), fl); filtered.put(e.getKey(), fl);
} }
} }
return new MapDocument(a.getIdentifier(), filtered); return new MapDocument(a.getIdentifier(), filtered);
} }
private static boolean isBlackListed(String value, List<Pattern> blacklist) { private static boolean isBlackListed(String value, List<Pattern> blacklist) {
for (Pattern pattern : blacklist) { for (Pattern pattern : blacklist) {
if (pattern.matcher(value).matches()) { if (pattern.matcher(value).matches()) {
return true; return true;
} }
} }
return false; return false;
} }
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import java.lang.annotation.ElementType; import java.lang.annotation.ElementType;
@ -9,5 +10,5 @@ import java.lang.annotation.Target;
@Target(ElementType.TYPE) @Target(ElementType.TYPE)
public @interface ClusteringClass { public @interface ClusteringClass {
public String value(); public String value();
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import java.util.ArrayList; import java.util.ArrayList;
@ -5,6 +6,8 @@ import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
@ -12,12 +15,11 @@ import eu.dnetlib.pace.model.ClusteringDef;
import eu.dnetlib.pace.model.Document; import eu.dnetlib.pace.model.Document;
import eu.dnetlib.pace.model.Field; import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldValueImpl; import eu.dnetlib.pace.model.FieldValueImpl;
import org.apache.commons.lang3.StringUtils;
public class ClusteringCombiner { public class ClusteringCombiner {
private static String SEPARATOR = ":"; private static String SEPARATOR = ":";
private static String COLLAPSE_ON= "collapseOn"; private static String COLLAPSE_ON = "collapseOn";
public static Collection<String> combine(final Document a, final Config conf) { public static Collection<String> combine(final Document a, final Config conf) {
final Collection<String> res = Sets.newLinkedHashSet(); final Collection<String> res = Sets.newLinkedHashSet();
@ -30,31 +32,33 @@ public class ClusteringCombiner {
if (values instanceof FieldValueImpl) { if (values instanceof FieldValueImpl) {
fields.add(values); fields.add(values);
} } else {
else {
fields.addAll((List<Field>) values); fields.addAll((List<Field>) values);
} }
res.addAll( res
cd.clusteringFunction() .addAll(
.apply(conf, fields) cd
.stream() .clusteringFunction()
.map(k -> prefix + SEPARATOR +k) .apply(conf, fields)
.collect(Collectors.toList()) .stream()
); .map(k -> prefix + SEPARATOR + k)
.collect(Collectors.toList()));
} }
} }
return res; return res;
} }
private static String getPrefix(ClusteringDef cd, String fieldName) { private static String getPrefix(ClusteringDef cd, String fieldName) {
return cd.getName()+ SEPARATOR + return cd.getName() + SEPARATOR +
cd.getParams().keySet() cd
.stream() .getParams()
.filter(k -> k.contains(COLLAPSE_ON)) .keySet()
.findFirst() .stream()
.map(k -> StringUtils.substringAfter(k, SEPARATOR)) .filter(k -> k.contains(COLLAPSE_ON))
.orElse(fieldName); .findFirst()
.map(k -> StringUtils.substringAfter(k, SEPARATOR))
.orElse(fieldName);
} }
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import java.util.Collection; import java.util.Collection;
@ -8,9 +9,9 @@ import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field; import eu.dnetlib.pace.model.Field;
public interface ClusteringFunction { public interface ClusteringFunction {
public Collection<String> apply(Config config, List<Field> fields); public Collection<String> apply(Config config, List<Field> fields);
public Map<String, Integer> getParams(); public Map<String, Integer> getParams();
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import java.util.Collection; import java.util.Collection;
@ -5,6 +6,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
@ClusteringClass("immutablefieldvalue") @ClusteringClass("immutablefieldvalue")

View File

@ -1,53 +1,57 @@
package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.common.AbstractPaceFunctions; package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import org.apache.commons.lang3.StringUtils;
import java.util.*; import java.util.*;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
@ClusteringClass("keywordsclustering") @ClusteringClass("keywordsclustering")
public class KeywordsClustering extends AbstractClusteringFunction { public class KeywordsClustering extends AbstractClusteringFunction {
public KeywordsClustering(Map<String, Integer> params) { public KeywordsClustering(Map<String, Integer> params) {
super(params); super(params);
} }
@Override @Override
protected Collection<String> doApply(final Config conf, String s) { protected Collection<String> doApply(final Config conf, String s) {
//takes city codes and keywords codes without duplicates // takes city codes and keywords codes without duplicates
Set<String> keywords = getKeywords(s, conf.translationMap(), params.getOrDefault("windowSize", 4)); Set<String> keywords = getKeywords(s, conf.translationMap(), params.getOrDefault("windowSize", 4));
Set<String> cities = getCities(s, params.getOrDefault("windowSize", 4)); Set<String> cities = getCities(s, params.getOrDefault("windowSize", 4));
//list of combination to return as result // list of combination to return as result
final Collection<String> combinations = new LinkedHashSet<String>(); final Collection<String> combinations = new LinkedHashSet<String>();
for (String keyword: keywordsToCodes(keywords, conf.translationMap())){ for (String keyword : keywordsToCodes(keywords, conf.translationMap())) {
for (String city: citiesToCodes(cities)) { for (String city : citiesToCodes(cities)) {
combinations.add(keyword+"-"+city); combinations.add(keyword + "-" + city);
if (combinations.size()>=params.getOrDefault("max", 2)) { if (combinations.size() >= params.getOrDefault("max", 2)) {
return combinations; return combinations;
} }
} }
} }
return combinations; return combinations;
} }
@Override @Override
public Collection<String> apply(final Config conf, List<Field> fields) { public Collection<String> apply(final Config conf, List<Field> fields) {
return fields.stream().filter(f -> !f.isEmpty()) return fields
.map(Field::stringValue) .stream()
.map(this::cleanup) .filter(f -> !f.isEmpty())
.map(this::normalize) .map(Field::stringValue)
.map(s -> filterAllStopWords(s)) .map(this::cleanup)
.map(s -> doApply(conf, s)) .map(this::normalize)
.map(c -> filterBlacklisted(c, ngramBlacklist)) .map(s -> filterAllStopWords(s))
.flatMap(c -> c.stream()) .map(s -> doApply(conf, s))
.filter(StringUtils::isNotBlank) .map(c -> filterBlacklisted(c, ngramBlacklist))
.collect(Collectors.toCollection(HashSet::new)); .flatMap(c -> c.stream())
} .filter(StringUtils::isNotBlank)
} .collect(Collectors.toCollection(HashSet::new));
}
}

View File

@ -1,77 +1,81 @@
package eu.dnetlib.pace.clustering;
import com.google.common.collect.Lists; package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.Person;
import org.apache.commons.lang3.StringUtils;
import java.util.*; import java.util.*;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.Person;
@ClusteringClass("lnfi") @ClusteringClass("lnfi")
public class LastNameFirstInitial extends AbstractClusteringFunction{ public class LastNameFirstInitial extends AbstractClusteringFunction {
private boolean DEFAULT_AGGRESSIVE = true; private boolean DEFAULT_AGGRESSIVE = true;
public LastNameFirstInitial(final Map<String, Integer> params) { public LastNameFirstInitial(final Map<String, Integer> params) {
super(params); super(params);
} }
@Override @Override
public Collection<String> apply(Config conf, List<Field> fields) { public Collection<String> apply(Config conf, List<Field> fields) {
return fields.stream().filter(f -> !f.isEmpty()) return fields
.map(Field::stringValue) .stream()
.map(this::normalize) .filter(f -> !f.isEmpty())
.map(s -> doApply(conf, s)) .map(Field::stringValue)
.map(c -> filterBlacklisted(c, ngramBlacklist)) .map(this::normalize)
.flatMap(c -> c.stream()) .map(s -> doApply(conf, s))
.filter(StringUtils::isNotBlank) .map(c -> filterBlacklisted(c, ngramBlacklist))
.collect(Collectors.toCollection(HashSet::new)); .flatMap(c -> c.stream())
} .filter(StringUtils::isNotBlank)
.collect(Collectors.toCollection(HashSet::new));
}
@Override @Override
protected String normalize(final String s) { protected String normalize(final String s) {
return fixAliases(transliterate(nfd(unicodeNormalization(s)))) return fixAliases(transliterate(nfd(unicodeNormalization(s))))
// do not compact the regexes in a single expression, would cause StackOverflowError in case of large input strings // do not compact the regexes in a single expression, would cause StackOverflowError in case of large input
.replaceAll("[^ \\w]+", "") // strings
.replaceAll("(\\p{InCombiningDiacriticalMarks})+", "") .replaceAll("[^ \\w]+", "")
.replaceAll("(\\p{Punct})+", " ") .replaceAll("(\\p{InCombiningDiacriticalMarks})+", "")
.replaceAll("(\\d)+", " ") .replaceAll("(\\p{Punct})+", " ")
.replaceAll("(\\n)+", " ") .replaceAll("(\\d)+", " ")
.trim(); .replaceAll("(\\n)+", " ")
} .trim();
}
@Override @Override
protected Collection<String> doApply(final Config conf, final String s) { protected Collection<String> doApply(final Config conf, final String s) {
final List<String> res = Lists.newArrayList(); final List<String> res = Lists.newArrayList();
final boolean aggressive = (Boolean) (getParams().containsKey("aggressive") ? getParams().get("aggressive") : DEFAULT_AGGRESSIVE); final boolean aggressive = (Boolean) (getParams().containsKey("aggressive") ? getParams().get("aggressive")
: DEFAULT_AGGRESSIVE);
Person p = new Person(s, aggressive); Person p = new Person(s, aggressive);
if (p.isAccurate()) { if (p.isAccurate()) {
String lastName = p.getNormalisedSurname().toLowerCase(); String lastName = p.getNormalisedSurname().toLowerCase();
String firstInitial = p.getNormalisedFirstName().toLowerCase().substring(0,1); String firstInitial = p.getNormalisedFirstName().toLowerCase().substring(0, 1);
res.add(firstInitial.concat(lastName)); res.add(firstInitial.concat(lastName));
} } else { // is not accurate, meaning it has no defined name and surname
else { // is not accurate, meaning it has no defined name and surname List<String> fullname = Arrays.asList(p.getNormalisedFullname().split(" "));
List<String> fullname = Arrays.asList(p.getNormalisedFullname().split(" ")); if (fullname.size() == 1) {
if (fullname.size() == 1) { res.add(p.getNormalisedFullname().toLowerCase());
res.add(p.getNormalisedFullname().toLowerCase()); } else if (fullname.size() == 2) {
} res.add(fullname.get(0).substring(0, 1).concat(fullname.get(1)).toLowerCase());
else if (fullname.size() == 2) { res.add(fullname.get(1).substring(0, 1).concat(fullname.get(0)).toLowerCase());
res.add(fullname.get(0).substring(0,1).concat(fullname.get(1)).toLowerCase()); } else {
res.add(fullname.get(1).substring(0,1).concat(fullname.get(0)).toLowerCase()); res.add(fullname.get(0).substring(0, 1).concat(fullname.get(fullname.size() - 1)).toLowerCase());
} res.add(fullname.get(fullname.size() - 1).substring(0, 1).concat(fullname.get(0)).toLowerCase());
else { }
res.add(fullname.get(0).substring(0,1).concat(fullname.get(fullname.size()-1)).toLowerCase()); }
res.add(fullname.get(fullname.size()-1).substring(0,1).concat(fullname.get(0)).toLowerCase());
}
}
return res; return res;
} }
} }

View File

@ -1,14 +1,17 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field; import eu.dnetlib.pace.model.Field;
import org.apache.commons.lang3.StringUtils;
@ClusteringClass("lowercase") @ClusteringClass("lowercase")
public class LowercaseClustering extends AbstractClusteringFunction { public class LowercaseClustering extends AbstractClusteringFunction {
@ -20,7 +23,7 @@ public class LowercaseClustering extends AbstractClusteringFunction {
@Override @Override
public Collection<String> apply(Config conf, List<Field> fields) { public Collection<String> apply(Config conf, List<Field> fields) {
Collection<String> c = Sets.newLinkedHashSet(); Collection<String> c = Sets.newLinkedHashSet();
for(Field f : fields) { for (Field f : fields) {
c.addAll(doApply(conf, f.stringValue())); c.addAll(doApply(conf, f.stringValue()));
} }
return c; return c;
@ -28,7 +31,7 @@ public class LowercaseClustering extends AbstractClusteringFunction {
@Override @Override
protected Collection<String> doApply(final Config conf, final String s) { protected Collection<String> doApply(final Config conf, final String s) {
if(StringUtils.isBlank(s)) { if (StringUtils.isBlank(s)) {
return Lists.newArrayList(); return Lists.newArrayList();
} }
return Lists.newArrayList(s.toLowerCase().trim()); return Lists.newArrayList(s.toLowerCase().trim());

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import java.util.Set; import java.util.Set;
@ -10,11 +11,14 @@ public class NGramUtils extends AbstractPaceFunctions {
private static final int SIZE = 100; private static final int SIZE = 100;
private static Set<String> stopwords = AbstractPaceFunctions.loadFromClasspath("/eu/dnetlib/pace/config/stopwords_en.txt"); private static Set<String> stopwords = AbstractPaceFunctions
.loadFromClasspath("/eu/dnetlib/pace/config/stopwords_en.txt");
public static String cleanupForOrdering(String s) { public static String cleanupForOrdering(String s) {
NGramUtils utils = new NGramUtils(); NGramUtils utils = new NGramUtils();
return (utils.filterStopWords(utils.normalize(s), stopwords) + StringUtils.repeat(" ", SIZE)).substring(0, SIZE).replaceAll(" ", ""); return (utils.filterStopWords(utils.normalize(s), stopwords) + StringUtils.repeat(" ", SIZE))
.substring(0, SIZE)
.replaceAll(" ", "");
} }
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import java.util.Collection; import java.util.Collection;
@ -6,6 +7,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
@ClusteringClass("ngrampairs") @ClusteringClass("ngrampairs")
@ -14,7 +16,7 @@ public class NgramPairs extends Ngrams {
public NgramPairs(Map<String, Integer> params) { public NgramPairs(Map<String, Integer> params) {
super(params); super(params);
} }
@Override @Override
protected Collection<String> doApply(Config conf, String s) { protected Collection<String> doApply(Config conf, String s) {
return ngramPairs(Lists.newArrayList(getNgrams(s, param("ngramLen"), param("max") * 2, 1, 2)), param("max")); return ngramPairs(Lists.newArrayList(getNgrams(s, param("ngramLen"), param("max") * 2, 1, 2)), param("max"));
@ -28,7 +30,7 @@ public class NgramPairs extends Ngrams {
break; break;
} }
res.add(ngrams.get(i) + ngrams.get(j)); res.add(ngrams.get(i) + ngrams.get(j));
//System.out.println("-- " + concatNgrams); // System.out.println("-- " + concatNgrams);
} }
return res; return res;
} }

View File

@ -1,9 +1,10 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.config.Config;
import java.util.*; import java.util.*;
import eu.dnetlib.pace.config.Config;
@ClusteringClass("ngrams") @ClusteringClass("ngrams")
public class Ngrams extends AbstractClusteringFunction { public class Ngrams extends AbstractClusteringFunction {
@ -36,7 +37,7 @@ public class Ngrams extends AbstractClusteringFunction {
} }
} }
} }
//System.out.println(ngrams + " n: " + ngrams.size()); // System.out.println(ngrams + " n: " + ngrams.size());
return ngrams; return ngrams;
} }

View File

@ -1,17 +1,20 @@
package eu.dnetlib.pace.clustering;
import com.google.common.collect.Sets; package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.Person;
import org.apache.commons.lang3.StringUtils;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.Person;
@ClusteringClass("personClustering") @ClusteringClass("personClustering")
public class PersonClustering extends AbstractPaceFunctions implements ClusteringFunction { public class PersonClustering extends AbstractPaceFunctions implements ClusteringFunction {
@ -31,7 +34,8 @@ public class PersonClustering extends AbstractPaceFunctions implements Clusterin
final Person person = new Person(f.stringValue(), false); final Person person = new Person(f.stringValue(), false);
if (StringUtils.isNotBlank(person.getNormalisedFirstName()) && StringUtils.isNotBlank(person.getNormalisedSurname())) { if (StringUtils.isNotBlank(person.getNormalisedFirstName())
&& StringUtils.isNotBlank(person.getNormalisedSurname())) {
hashes.add(firstLC(person.getNormalisedFirstName()) + person.getNormalisedSurname().toLowerCase()); hashes.add(firstLC(person.getNormalisedFirstName()) + person.getNormalisedSurname().toLowerCase());
} else { } else {
for (final String token1 : tokens(f.stringValue(), MAX_TOKENS)) { for (final String token1 : tokens(f.stringValue(), MAX_TOKENS)) {

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import java.util.Collection; import java.util.Collection;
@ -22,7 +23,8 @@ public class PersonHash extends AbstractClusteringFunction {
protected Collection<String> doApply(final Config conf, final String s) { protected Collection<String> doApply(final Config conf, final String s) {
final List<String> res = Lists.newArrayList(); final List<String> res = Lists.newArrayList();
final boolean aggressive = (Boolean) (getParams().containsKey("aggressive") ? getParams().get("aggressive") : DEFAULT_AGGRESSIVE); final boolean aggressive = (Boolean) (getParams().containsKey("aggressive") ? getParams().get("aggressive")
: DEFAULT_AGGRESSIVE);
res.add(new Person(s, aggressive).hash()); res.add(new Person(s, aggressive).hash());

View File

@ -1,10 +1,11 @@
package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.config.Config; package eu.dnetlib.pace.clustering;
import java.util.Collection; import java.util.Collection;
import java.util.Map; import java.util.Map;
import eu.dnetlib.pace.config.Config;
public class RandomClusteringFunction extends AbstractClusteringFunction { public class RandomClusteringFunction extends AbstractClusteringFunction {
public RandomClusteringFunction(Map<String, Integer> params) { public RandomClusteringFunction(Map<String, Integer> params) {

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import java.util.*; import java.util.*;
@ -5,6 +6,7 @@ import java.util.*;
import com.google.common.base.Joiner; import com.google.common.base.Joiner;
import com.google.common.base.Splitter; import com.google.common.base.Splitter;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
@ClusteringClass("sortedngrampairs") @ClusteringClass("sortedngrampairs")
@ -21,7 +23,9 @@ public class SortedNgramPairs extends NgramPairs {
Collections.sort(tokens); Collections.sort(tokens);
return ngramPairs(Lists.newArrayList(getNgrams(Joiner.on(" ").join(tokens), param("ngramLen"), param("max") * 2, 1, 2)), param("max")); return ngramPairs(
Lists.newArrayList(getNgrams(Joiner.on(" ").join(tokens), param("ngramLen"), param("max") * 2, 1, 2)),
param("max"));
} }
} }

View File

@ -1,15 +1,17 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import eu.dnetlib.pace.config.Config;
import org.apache.commons.lang3.RandomStringUtils; import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Config;
@ClusteringClass("spacetrimmingfieldvalue") @ClusteringClass("spacetrimmingfieldvalue")
public class SpaceTrimmingFieldValue extends AbstractClusteringFunction { public class SpaceTrimmingFieldValue extends AbstractClusteringFunction {
@ -21,7 +23,10 @@ public class SpaceTrimmingFieldValue extends AbstractClusteringFunction {
protected Collection<String> doApply(final Config conf, final String s) { protected Collection<String> doApply(final Config conf, final String s) {
final List<String> res = Lists.newArrayList(); final List<String> res = Lists.newArrayList();
res.add(StringUtils.isBlank(s) ? RandomStringUtils.random(getParams().get("randomLength")) : s.toLowerCase().replaceAll("\\s+", "")); res
.add(
StringUtils.isBlank(s) ? RandomStringUtils.random(getParams().get("randomLength"))
: s.toLowerCase().replaceAll("\\s+", ""));
return res; return res;
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import java.util.Collection; import java.util.Collection;
@ -5,6 +6,7 @@ import java.util.Map;
import java.util.Set; import java.util.Set;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
@ClusteringClass("suffixprefix") @ClusteringClass("suffixprefix")
@ -18,7 +20,7 @@ public class SuffixPrefix extends AbstractClusteringFunction {
protected Collection<String> doApply(Config conf, String s) { protected Collection<String> doApply(Config conf, String s) {
return suffixPrefix(s, param("len"), param("max")); return suffixPrefix(s, param("len"), param("max"));
} }
private Collection<String> suffixPrefix(String s, int len, int max) { private Collection<String> suffixPrefix(String s, int len, int max) {
final Set<String> bigrams = Sets.newLinkedHashSet(); final Set<String> bigrams = Sets.newLinkedHashSet();
int i = 0; int i = 0;

View File

@ -1,8 +1,5 @@
package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.common.AbstractPaceFunctions; package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.net.URL; import java.net.URL;
@ -12,43 +9,46 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
@ClusteringClass("urlclustering") @ClusteringClass("urlclustering")
public class UrlClustering extends AbstractPaceFunctions implements ClusteringFunction { public class UrlClustering extends AbstractPaceFunctions implements ClusteringFunction {
protected Map<String, Integer> params; protected Map<String, Integer> params;
public UrlClustering(final Map<String, Integer> params) { public UrlClustering(final Map<String, Integer> params) {
this.params = params; this.params = params;
} }
@Override @Override
public Collection<String> apply(final Config conf, List<Field> fields) { public Collection<String> apply(final Config conf, List<Field> fields) {
try { try {
return fields.stream() return fields
.filter(f -> !f.isEmpty()) .stream()
.map(Field::stringValue) .filter(f -> !f.isEmpty())
.map(this::asUrl) .map(Field::stringValue)
.map(URL::getHost) .map(this::asUrl)
.collect(Collectors.toCollection(HashSet::new)); .map(URL::getHost)
} .collect(Collectors.toCollection(HashSet::new));
catch (IllegalStateException e){ } catch (IllegalStateException e) {
return new HashSet<>(); return new HashSet<>();
} }
} }
@Override @Override
public Map<String, Integer> getParams() { public Map<String, Integer> getParams() {
return null; return null;
} }
private URL asUrl(String value) {
try {
return new URL(value);
} catch (MalformedURLException e) {
// should not happen as checked by pace typing
throw new IllegalStateException("invalid URL: " + value);
}
}
private URL asUrl(String value) {
try {
return new URL(value);
} catch (MalformedURLException e) {
// should not happen as checked by pace typing
throw new IllegalStateException("invalid URL: " + value);
}
}
} }

View File

@ -1,90 +1,91 @@
package eu.dnetlib.pace.clustering;
import com.google.common.collect.Sets; package eu.dnetlib.pace.clustering;
import eu.dnetlib.pace.config.Config;
import java.util.*; import java.util.*;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
@ClusteringClass("wordsStatsSuffixPrefixChain") @ClusteringClass("wordsStatsSuffixPrefixChain")
public class WordsStatsSuffixPrefixChain extends AbstractClusteringFunction { public class WordsStatsSuffixPrefixChain extends AbstractClusteringFunction {
public WordsStatsSuffixPrefixChain(Map<String, Integer> params) { public WordsStatsSuffixPrefixChain(Map<String, Integer> params) {
super(params); super(params);
} }
@Override @Override
protected Collection<String> doApply(Config conf, String s) { protected Collection<String> doApply(Config conf, String s) {
return suffixPrefixChain(s, param("mod")); return suffixPrefixChain(s, param("mod"));
} }
private Collection<String> suffixPrefixChain(String s, int mod) { private Collection<String> suffixPrefixChain(String s, int mod) {
//create the list of words from the string (remove short words) // create the list of words from the string (remove short words)
List<String> wordsList = List<String> wordsList = Arrays
Arrays.stream(s.split(" ")) .stream(s.split(" "))
.filter(si -> si.length() > 3) .filter(si -> si.length() > 3)
.collect(Collectors.toList()); .collect(Collectors.toList());
final int words = wordsList.size(); final int words = wordsList.size();
final int letters = s.length(); final int letters = s.length();
//create the prefix: number of words + number of letters/mod // create the prefix: number of words + number of letters/mod
String prefix = words + "-" + letters/mod + "-"; String prefix = words + "-" + letters / mod + "-";
return doSuffixPrefixChain(wordsList, prefix); return doSuffixPrefixChain(wordsList, prefix);
} }
private Collection<String> doSuffixPrefixChain(List<String> wordsList, String prefix) { private Collection<String> doSuffixPrefixChain(List<String> wordsList, String prefix) {
Set<String> set = Sets.newLinkedHashSet(); Set<String> set = Sets.newLinkedHashSet();
switch(wordsList.size()){ switch (wordsList.size()) {
case 0: case 0:
case 1: case 1:
break; break;
case 2: case 2:
set.add( set
prefix + .add(
suffix(wordsList.get(0), 3) + prefix +
prefix(wordsList.get(1), 3) suffix(wordsList.get(0), 3) +
); prefix(wordsList.get(1), 3));
set.add( set
prefix + .add(
prefix(wordsList.get(0), 3) + prefix +
suffix(wordsList.get(1), 3) prefix(wordsList.get(0), 3) +
); suffix(wordsList.get(1), 3));
break; break;
default: default:
set.add( set
prefix + .add(
suffix(wordsList.get(0), 3) + prefix +
prefix(wordsList.get(1), 3) + suffix(wordsList.get(0), 3) +
suffix(wordsList.get(2), 3) prefix(wordsList.get(1), 3) +
); suffix(wordsList.get(2), 3));
set.add( set
prefix + .add(
prefix(wordsList.get(0), 3) + prefix +
suffix(wordsList.get(1), 3) + prefix(wordsList.get(0), 3) +
prefix(wordsList.get(2), 3) suffix(wordsList.get(1), 3) +
); prefix(wordsList.get(2), 3));
break; break;
} }
return set; return set;
} }
private String suffix(String s, int len) {
return s.substring(s.length() - len);
}
private String suffix(String s, int len) { private String prefix(String s, int len) {
return s.substring(s.length()-len); return s.substring(0, len);
} }
private String prefix(String s, int len) {
return s.substring(0, len);
}
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.clustering; package eu.dnetlib.pace.clustering;
import java.util.Collection; import java.util.Collection;
@ -5,53 +6,54 @@ import java.util.Map;
import java.util.Set; import java.util.Set;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
@ClusteringClass("wordssuffixprefix") @ClusteringClass("wordssuffixprefix")
public class WordsSuffixPrefix extends AbstractClusteringFunction { public class WordsSuffixPrefix extends AbstractClusteringFunction {
public WordsSuffixPrefix(Map<String, Integer> params) { public WordsSuffixPrefix(Map<String, Integer> params) {
super(params); super(params);
} }
@Override @Override
protected Collection<String> doApply(Config conf, String s) { protected Collection<String> doApply(Config conf, String s) {
return suffixPrefix(s, param("len"), param("max")); return suffixPrefix(s, param("len"), param("max"));
} }
private Collection<String> suffixPrefix(String s, int len, int max) { private Collection<String> suffixPrefix(String s, int len, int max) {
final int words = s.split(" ").length; final int words = s.split(" ").length;
// adjust the token length according to the number of words // adjust the token length according to the number of words
switch (words) { switch (words) {
case 1: case 1:
return Sets.newLinkedHashSet(); return Sets.newLinkedHashSet();
case 2: case 2:
return doSuffixPrefix(s, len+2, max, words); return doSuffixPrefix(s, len + 2, max, words);
case 3: case 3:
return doSuffixPrefix(s, len+1, max, words); return doSuffixPrefix(s, len + 1, max, words);
default: default:
return doSuffixPrefix(s, len, max, words); return doSuffixPrefix(s, len, max, words);
} }
} }
private Collection<String> doSuffixPrefix(String s, int len, int max, int words) { private Collection<String> doSuffixPrefix(String s, int len, int max, int words) {
final Set<String> bigrams = Sets.newLinkedHashSet(); final Set<String> bigrams = Sets.newLinkedHashSet();
int i = 0; int i = 0;
while (++i < s.length() && bigrams.size() < max) { while (++i < s.length() && bigrams.size() < max) {
int j = s.indexOf(" ", i); int j = s.indexOf(" ", i);
int offset = j + len + 1 < s.length() ? j + len + 1 : s.length(); int offset = j + len + 1 < s.length() ? j + len + 1 : s.length();
if (j - len > 0) { if (j - len > 0) {
String bigram = s.substring(j - len, offset).replaceAll(" ", "").trim(); String bigram = s.substring(j - len, offset).replaceAll(" ", "").trim();
if (bigram.length() >= 4) { if (bigram.length() >= 4) {
bigrams.add(words+bigram); bigrams.add(words + bigram);
} }
} }
} }
return bigrams; return bigrams;
} }
} }

View File

@ -1,16 +1,5 @@
package eu.dnetlib.pace.common;
import com.google.common.base.Joiner; package eu.dnetlib.pace.common;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.ibm.icu.text.Transliterator;
import eu.dnetlib.pace.clustering.NGramUtils;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.model.FieldListImpl;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import java.io.IOException; import java.io.IOException;
import java.io.StringWriter; import java.io.StringWriter;
@ -21,6 +10,20 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.ibm.icu.text.Transliterator;
import eu.dnetlib.pace.clustering.NGramUtils;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.model.FieldListImpl;
/** /**
* Set of common functions for the framework * Set of common functions for the framework
* *
@ -28,330 +31,337 @@ import java.util.stream.Collectors;
*/ */
public abstract class AbstractPaceFunctions { public abstract class AbstractPaceFunctions {
//city map to be used when translating the city names into codes // city map to be used when translating the city names into codes
private static Map<String, String> cityMap = AbstractPaceFunctions.loadMapFromClasspath("/eu/dnetlib/pace/config/city_map.csv"); private static Map<String, String> cityMap = AbstractPaceFunctions
.loadMapFromClasspath("/eu/dnetlib/pace/config/city_map.csv");
//list of stopwords in different languages
protected static Set<String> stopwords_gr = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_gr.txt"); // list of stopwords in different languages
protected static Set<String> stopwords_en = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_en.txt"); protected static Set<String> stopwords_gr = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_gr.txt");
protected static Set<String> stopwords_de = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_de.txt"); protected static Set<String> stopwords_en = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_en.txt");
protected static Set<String> stopwords_es = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_es.txt"); protected static Set<String> stopwords_de = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_de.txt");
protected static Set<String> stopwords_fr = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_fr.txt"); protected static Set<String> stopwords_es = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_es.txt");
protected static Set<String> stopwords_it = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_it.txt"); protected static Set<String> stopwords_fr = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_fr.txt");
protected static Set<String> stopwords_pt = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_pt.txt"); protected static Set<String> stopwords_it = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_it.txt");
protected static Set<String> stopwords_pt = loadFromClasspath("/eu/dnetlib/pace/config/stopwords_pt.txt");
//transliterator
protected static Transliterator transliterator = Transliterator.getInstance("Any-Eng"); // transliterator
protected static Transliterator transliterator = Transliterator.getInstance("Any-Eng");
//blacklist of ngrams: to avoid generic keys
protected static Set<String> ngramBlacklist = loadFromClasspath("/eu/dnetlib/pace/config/ngram_blacklist.txt"); // blacklist of ngrams: to avoid generic keys
protected static Set<String> ngramBlacklist = loadFromClasspath("/eu/dnetlib/pace/config/ngram_blacklist.txt");
//html regex for normalization
public final String HTML_REGEX = "<[^>]*>"; // html regex for normalization
public final String HTML_REGEX = "<[^>]*>";
private static final String alpha = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 ";
private static final String aliases_from = "⁰¹²³⁴⁵⁶⁷⁸⁹⁺⁻⁼⁽⁾ⁿ₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎àáâäæãåāèéêëēėęəîïíīįìôöòóœøōõûüùúūßśšłžźżçćčñń"; private static final String alpha = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 ";
private static final String aliases_to = "0123456789+-=()n0123456789+-=()aaaaaaaaeeeeeeeeiiiiiioooooooouuuuussslzzzcccnn"; private static final String aliases_from = "⁰¹²³⁴⁵⁶⁷⁸⁹⁺⁻⁼⁽⁾ⁿ₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎àáâäæãåāèéêëēėęəîïíīįìôöòóœøōõûüùúūßśšłžźżçćčñń";
private static final String aliases_to = "0123456789+-=()n0123456789+-=()aaaaaaaaeeeeeeeeiiiiiioooooooouuuuussslzzzcccnn";
//doi prefix for normalization
public final String DOI_PREFIX = "(https?:\\/\\/dx\\.doi\\.org\\/)|(doi:)"; // doi prefix for normalization
public final String DOI_PREFIX = "(https?:\\/\\/dx\\.doi\\.org\\/)|(doi:)";
private Pattern numberPattern = Pattern.compile("-?\\d+(\\.\\d+)?");
private Pattern numberPattern = Pattern.compile("-?\\d+(\\.\\d+)?");
private Pattern hexUnicodePattern = Pattern.compile("\\\\u(\\p{XDigit}{4})");
private Pattern hexUnicodePattern = Pattern.compile("\\\\u(\\p{XDigit}{4})");
protected final static FieldList EMPTY_FIELD = new FieldListImpl();
protected final static FieldList EMPTY_FIELD = new FieldListImpl();
protected String concat(final List<String> l) {
return Joiner.on(" ").skipNulls().join(l); protected String concat(final List<String> l) {
} return Joiner.on(" ").skipNulls().join(l);
}
protected String cleanup(final String s) {
protected String cleanup(final String s) {
final String s1 = s.replaceAll(HTML_REGEX, "");
final String s2 = unicodeNormalization(s1.toLowerCase()); final String s1 = s.replaceAll(HTML_REGEX, "");
final String s3 = nfd(s2); final String s2 = unicodeNormalization(s1.toLowerCase());
final String s4 = fixXML(s3); final String s3 = nfd(s2);
final String s5 = s4.replaceAll("([0-9]+)", " $1 "); final String s4 = fixXML(s3);
final String s6 = transliterate(s5); final String s5 = s4.replaceAll("([0-9]+)", " $1 ");
final String s7 = fixAliases(s6); final String s6 = transliterate(s5);
final String s8 = s7.replaceAll("[^\\p{ASCII}]", ""); final String s7 = fixAliases(s6);
final String s9 = s8.replaceAll("[\\p{Punct}]", " "); final String s8 = s7.replaceAll("[^\\p{ASCII}]", "");
final String s10 = s9.replaceAll("\\n", " "); final String s9 = s8.replaceAll("[\\p{Punct}]", " ");
final String s11 = s10.replaceAll("(?m)\\s+", " "); final String s10 = s9.replaceAll("\\n", " ");
final String s12 = s11.trim(); final String s11 = s10.replaceAll("(?m)\\s+", " ");
return s12; final String s12 = s11.trim();
} return s12;
}
protected String fixXML(final String a){
protected String fixXML(final String a) {
return a.replaceAll("&ndash;", " ")
.replaceAll("&amp;", " ") return a
.replaceAll("&quot;", " ") .replaceAll("&ndash;", " ")
.replaceAll("&minus;", " "); .replaceAll("&amp;", " ")
} .replaceAll("&quot;", " ")
.replaceAll("&minus;", " ");
protected boolean checkNumbers(final String a, final String b) { }
final String numbersA = getNumbers(a);
final String numbersB = getNumbers(b); protected boolean checkNumbers(final String a, final String b) {
final String romansA = getRomans(a); final String numbersA = getNumbers(a);
final String romansB = getRomans(b); final String numbersB = getNumbers(b);
return !numbersA.equals(numbersB) || !romansA.equals(romansB); final String romansA = getRomans(a);
} final String romansB = getRomans(b);
return !numbersA.equals(numbersB) || !romansA.equals(romansB);
protected String getRomans(final String s) { }
final StringBuilder sb = new StringBuilder();
for (final String t : s.split(" ")) { protected String getRomans(final String s) {
sb.append(isRoman(t) ? t : ""); final StringBuilder sb = new StringBuilder();
} for (final String t : s.split(" ")) {
return sb.toString(); sb.append(isRoman(t) ? t : "");
} }
return sb.toString();
protected boolean isRoman(final String s) { }
return s.replaceAll("^M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})$", "qwertyuiop").equals("qwertyuiop");
} protected boolean isRoman(final String s) {
return s
protected String getNumbers(final String s) { .replaceAll("^M{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3})$", "qwertyuiop")
final StringBuilder sb = new StringBuilder(); .equals("qwertyuiop");
for (final String t : s.split(" ")) { }
sb.append(isNumber(t) ? t : "");
} protected String getNumbers(final String s) {
return sb.toString(); final StringBuilder sb = new StringBuilder();
} for (final String t : s.split(" ")) {
sb.append(isNumber(t) ? t : "");
public boolean isNumber(String strNum) { }
if (strNum == null) { return sb.toString();
return false; }
}
return numberPattern.matcher(strNum).matches(); public boolean isNumber(String strNum) {
} if (strNum == null) {
return false;
protected static String fixAliases(final String s) { }
final StringBuilder sb = new StringBuilder(); return numberPattern.matcher(strNum).matches();
}
s.chars().forEach(ch -> {
final int i = StringUtils.indexOf(aliases_from, ch); protected static String fixAliases(final String s) {
sb.append(i >= 0 ? aliases_to.charAt(i) : (char)ch); final StringBuilder sb = new StringBuilder();
});
s.chars().forEach(ch -> {
return sb.toString(); final int i = StringUtils.indexOf(aliases_from, ch);
} sb.append(i >= 0 ? aliases_to.charAt(i) : (char) ch);
});
protected static String transliterate(final String s) {
try { return sb.toString();
return transliterator.transliterate(s); }
}
catch(Exception e) { protected static String transliterate(final String s) {
return s; try {
} return transliterator.transliterate(s);
} } catch (Exception e) {
return s;
protected String removeSymbols(final String s) { }
final StringBuilder sb = new StringBuilder(); }
s.chars().forEach(ch -> { protected String removeSymbols(final String s) {
sb.append(StringUtils.contains(alpha, ch) ? (char)ch : ' '); final StringBuilder sb = new StringBuilder();
});
s.chars().forEach(ch -> {
return sb.toString().replaceAll("\\s+", " "); sb.append(StringUtils.contains(alpha, ch) ? (char) ch : ' ');
} });
protected String getFirstValue(final Field values) { return sb.toString().replaceAll("\\s+", " ");
return (values != null) && !Iterables.isEmpty(values) ? Iterables.getFirst(values, EMPTY_FIELD).stringValue() : ""; }
}
protected String getFirstValue(final Field values) {
protected boolean notNull(final String s) { return (values != null) && !Iterables.isEmpty(values) ? Iterables.getFirst(values, EMPTY_FIELD).stringValue()
return s != null; : "";
} }
protected String normalize(final String s) { protected boolean notNull(final String s) {
return fixAliases(transliterate(nfd(unicodeNormalization(s)))) return s != null;
.toLowerCase() }
// do not compact the regexes in a single expression, would cause StackOverflowError in case of large input strings
.replaceAll("[^ \\w]+", "") protected String normalize(final String s) {
.replaceAll("(\\p{InCombiningDiacriticalMarks})+", "") return fixAliases(transliterate(nfd(unicodeNormalization(s))))
.replaceAll("(\\p{Punct})+", " ") .toLowerCase()
.replaceAll("(\\d)+", " ") // do not compact the regexes in a single expression, would cause StackOverflowError in case of large input
.replaceAll("(\\n)+", " ") // strings
.trim(); .replaceAll("[^ \\w]+", "")
} .replaceAll("(\\p{InCombiningDiacriticalMarks})+", "")
.replaceAll("(\\p{Punct})+", " ")
public String nfd(final String s) { .replaceAll("(\\d)+", " ")
return Normalizer.normalize(s, Normalizer.Form.NFD); .replaceAll("(\\n)+", " ")
} .trim();
}
public String utf8(final String s) {
byte[] bytes = s.getBytes(StandardCharsets.UTF_8); public String nfd(final String s) {
return new String(bytes, StandardCharsets.UTF_8); return Normalizer.normalize(s, Normalizer.Form.NFD);
} }
public String unicodeNormalization(final String s) { public String utf8(final String s) {
byte[] bytes = s.getBytes(StandardCharsets.UTF_8);
Matcher m = hexUnicodePattern.matcher(s); return new String(bytes, StandardCharsets.UTF_8);
StringBuffer buf = new StringBuffer(s.length()); }
while (m.find()) {
String ch = String.valueOf((char) Integer.parseInt(m.group(1), 16)); public String unicodeNormalization(final String s) {
m.appendReplacement(buf, Matcher.quoteReplacement(ch));
} Matcher m = hexUnicodePattern.matcher(s);
m.appendTail(buf); StringBuffer buf = new StringBuffer(s.length());
return buf.toString(); while (m.find()) {
} String ch = String.valueOf((char) Integer.parseInt(m.group(1), 16));
m.appendReplacement(buf, Matcher.quoteReplacement(ch));
protected String filterStopWords(final String s, final Set<String> stopwords) { }
final StringTokenizer st = new StringTokenizer(s); m.appendTail(buf);
final StringBuilder sb = new StringBuilder(); return buf.toString();
while (st.hasMoreTokens()) { }
final String token = st.nextToken();
if (!stopwords.contains(token)) { protected String filterStopWords(final String s, final Set<String> stopwords) {
sb.append(token); final StringTokenizer st = new StringTokenizer(s);
sb.append(" "); final StringBuilder sb = new StringBuilder();
} while (st.hasMoreTokens()) {
} final String token = st.nextToken();
return sb.toString().trim(); if (!stopwords.contains(token)) {
} sb.append(token);
sb.append(" ");
public String filterAllStopWords(String s) { }
}
s = filterStopWords(s, stopwords_en); return sb.toString().trim();
s = filterStopWords(s, stopwords_de); }
s = filterStopWords(s, stopwords_it);
s = filterStopWords(s, stopwords_fr); public String filterAllStopWords(String s) {
s = filterStopWords(s, stopwords_pt);
s = filterStopWords(s, stopwords_es); s = filterStopWords(s, stopwords_en);
s = filterStopWords(s, stopwords_gr); s = filterStopWords(s, stopwords_de);
s = filterStopWords(s, stopwords_it);
return s; s = filterStopWords(s, stopwords_fr);
} s = filterStopWords(s, stopwords_pt);
s = filterStopWords(s, stopwords_es);
protected Collection<String> filterBlacklisted(final Collection<String> set, final Set<String> ngramBlacklist) { s = filterStopWords(s, stopwords_gr);
final Set<String> newset = Sets.newLinkedHashSet();
for (final String s : set) { return s;
if (!ngramBlacklist.contains(s)) { }
newset.add(s);
} protected Collection<String> filterBlacklisted(final Collection<String> set, final Set<String> ngramBlacklist) {
} final Set<String> newset = Sets.newLinkedHashSet();
return newset; for (final String s : set) {
} if (!ngramBlacklist.contains(s)) {
newset.add(s);
public static Set<String> loadFromClasspath(final String classpath) { }
}
Transliterator transliterator = Transliterator.getInstance("Any-Eng"); return newset;
}
final Set<String> h = Sets.newHashSet();
try { public static Set<String> loadFromClasspath(final String classpath) {
for (final String s : IOUtils.readLines(NGramUtils.class.getResourceAsStream(classpath), StandardCharsets.UTF_8)) {
h.add(fixAliases(transliterator.transliterate(s))); //transliteration of the stopwords Transliterator transliterator = Transliterator.getInstance("Any-Eng");
}
} catch (final Throwable e) { final Set<String> h = Sets.newHashSet();
return Sets.newHashSet(); try {
} for (final String s : IOUtils
return h; .readLines(NGramUtils.class.getResourceAsStream(classpath), StandardCharsets.UTF_8)) {
} h.add(fixAliases(transliterator.transliterate(s))); // transliteration of the stopwords
}
public static Map<String, String> loadMapFromClasspath(final String classpath) { } catch (final Throwable e) {
return Sets.newHashSet();
Transliterator transliterator = Transliterator.getInstance("Any-Eng"); }
return h;
final Map<String, String> m = new HashMap<>(); }
try {
for (final String s : IOUtils.readLines(AbstractPaceFunctions.class.getResourceAsStream(classpath), StandardCharsets.UTF_8)) { public static Map<String, String> loadMapFromClasspath(final String classpath) {
//string is like this: code;word1;word2;word3
String[] line = s.split(";"); Transliterator transliterator = Transliterator.getInstance("Any-Eng");
String value = line[0];
for (int i = 1; i < line.length; i++) { final Map<String, String> m = new HashMap<>();
m.put(fixAliases(transliterator.transliterate(line[i].toLowerCase())), value); try {
} for (final String s : IOUtils
} .readLines(AbstractPaceFunctions.class.getResourceAsStream(classpath), StandardCharsets.UTF_8)) {
} catch (final Throwable e) { // string is like this: code;word1;word2;word3
return new HashMap<>(); String[] line = s.split(";");
} String value = line[0];
return m; for (int i = 1; i < line.length; i++) {
} m.put(fixAliases(transliterator.transliterate(line[i].toLowerCase())), value);
}
public String removeKeywords(String s, Set<String> keywords) { }
} catch (final Throwable e) {
s = " " + s + " "; return new HashMap<>();
for (String k : keywords) { }
s = s.replaceAll(k.toLowerCase(), ""); return m;
} }
return s.trim(); public String removeKeywords(String s, Set<String> keywords) {
}
s = " " + s + " ";
public double commonElementsPercentage(Set<String> s1, Set<String> s2) { for (String k : keywords) {
s = s.replaceAll(k.toLowerCase(), "");
double longer = Math.max(s1.size(), s2.size()); }
return (double) s1.stream().filter(s2::contains).count() / longer;
} return s.trim();
}
//convert the set of keywords to codes
public Set<String> toCodes(Set<String> keywords, Map<String, String> translationMap) { public double commonElementsPercentage(Set<String> s1, Set<String> s2) {
return keywords.stream().map(s -> translationMap.get(s)).collect(Collectors.toSet());
} double longer = Math.max(s1.size(), s2.size());
return (double) s1.stream().filter(s2::contains).count() / longer;
public Set<String> keywordsToCodes(Set<String> keywords, Map<String, String> translationMap) { }
return toCodes(keywords, translationMap);
} // convert the set of keywords to codes
public Set<String> toCodes(Set<String> keywords, Map<String, String> translationMap) {
public Set<String> citiesToCodes(Set<String> keywords) { return keywords.stream().map(s -> translationMap.get(s)).collect(Collectors.toSet());
return toCodes(keywords, cityMap); }
}
public Set<String> keywordsToCodes(Set<String> keywords, Map<String, String> translationMap) {
protected String firstLC(final String s) { return toCodes(keywords, translationMap);
return StringUtils.substring(s, 0, 1).toLowerCase(); }
}
public Set<String> citiesToCodes(Set<String> keywords) {
protected Iterable<String> tokens(final String s, final int maxTokens) { return toCodes(keywords, cityMap);
return Iterables.limit(Splitter.on(" ").omitEmptyStrings().trimResults().split(s), maxTokens); }
}
protected String firstLC(final String s) {
public String normalizePid(String pid) { return StringUtils.substring(s, 0, 1).toLowerCase();
return pid.toLowerCase().replaceAll(DOI_PREFIX, ""); }
}
protected Iterable<String> tokens(final String s, final int maxTokens) {
//get the list of keywords into the input string return Iterables.limit(Splitter.on(" ").omitEmptyStrings().trimResults().split(s), maxTokens);
public Set<String> getKeywords(String s1, Map<String, String> translationMap, int windowSize) { }
String s = s1; public String normalizePid(String pid) {
return pid.toLowerCase().replaceAll(DOI_PREFIX, "");
List<String> tokens = Arrays.asList(s.toLowerCase().split(" ")); }
Set<String> codes = new HashSet<>(); // get the list of keywords into the input string
public Set<String> getKeywords(String s1, Map<String, String> translationMap, int windowSize) {
if (tokens.size() < windowSize)
windowSize = tokens.size(); String s = s1;
int length = windowSize; List<String> tokens = Arrays.asList(s.toLowerCase().split(" "));
while (length != 0) { Set<String> codes = new HashSet<>();
for (int i = 0; i <= tokens.size() - length; i++) { if (tokens.size() < windowSize)
String candidate = concat(tokens.subList(i, i + length)); windowSize = tokens.size();
if (translationMap.containsKey(candidate)) {
codes.add(candidate); int length = windowSize;
s = s.replace(candidate, "").trim();
} while (length != 0) {
}
for (int i = 0; i <= tokens.size() - length; i++) {
tokens = Arrays.asList(s.split(" ")); String candidate = concat(tokens.subList(i, i + length));
length -= 1; if (translationMap.containsKey(candidate)) {
} codes.add(candidate);
s = s.replace(candidate, "").trim();
return codes; }
} }
public Set<String> getCities(String s1, int windowSize) { tokens = Arrays.asList(s.split(" "));
return getKeywords(s1, cityMap, windowSize); length -= 1;
} }
public static <T> String readFromClasspath(final String filename, final Class<T> clazz) { return codes;
final StringWriter sw = new StringWriter(); }
try {
IOUtils.copy(clazz.getResourceAsStream(filename), sw, StandardCharsets.UTF_8); public Set<String> getCities(String s1, int windowSize) {
return sw.toString(); return getKeywords(s1, cityMap, windowSize);
} catch (final IOException e) { }
throw new RuntimeException("cannot load resource from classpath: " + filename);
} public static <T> String readFromClasspath(final String filename, final Class<T> clazz) {
} final StringWriter sw = new StringWriter();
try {
IOUtils.copy(clazz.getResourceAsStream(filename), sw, StandardCharsets.UTF_8);
return sw.toString();
} catch (final IOException e) {
throw new RuntimeException("cannot load resource from classpath: " + filename);
}
}
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.config; package eu.dnetlib.pace.config;
import java.util.List; import java.util.List;
@ -50,7 +51,6 @@ public interface Config {
*/ */
public Map<String, List<Pattern>> blacklists(); public Map<String, List<Pattern>> blacklists();
/** /**
* Translation map. * Translation map.
* *

View File

@ -1,16 +1,5 @@
package eu.dnetlib.pace.config;
import com.fasterxml.jackson.annotation.JsonIgnore; package eu.dnetlib.pace.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Maps;
import eu.dnetlib.pace.model.ClusteringDef;
import eu.dnetlib.pace.model.FieldDef;
import eu.dnetlib.pace.util.PaceException;
import org.antlr.stringtemplate.StringTemplate;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.io.IOException; import java.io.IOException;
import java.io.Serializable; import java.io.Serializable;
@ -22,9 +11,20 @@ import java.util.Map.Entry;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.antlr.stringtemplate.StringTemplate;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Maps;
import eu.dnetlib.pace.model.ClusteringDef;
import eu.dnetlib.pace.model.FieldDef;
import eu.dnetlib.pace.tree.support.TreeNodeDef; import eu.dnetlib.pace.tree.support.TreeNodeDef;
import eu.dnetlib.pace.util.PaceException;
public class DedupConfig implements Config, Serializable { public class DedupConfig implements Config, Serializable {
@ -56,7 +56,8 @@ public class DedupConfig implements Config, Serializable {
defaults.put("idPath", "$.id"); defaults.put("idPath", "$.id");
} }
public DedupConfig() {} public DedupConfig() {
}
public static DedupConfig load(final String json) { public static DedupConfig load(final String json) {
@ -66,10 +67,21 @@ public class DedupConfig implements Config, Serializable {
config.getPace().initModel(); config.getPace().initModel();
config.getPace().initTranslationMap(); config.getPace().initTranslationMap();
config.blacklists = config.getPace().getBlacklists().entrySet() config.blacklists = config
.stream() .getPace()
.collect(Collectors.toMap(e -> e.getKey(), .getBlacklists()
e ->e.getValue().stream().filter(s -> !StringUtils.isBlank(s)).map(Pattern::compile).collect(Collectors.toList()) )); .entrySet()
.stream()
.collect(
Collectors
.toMap(
e -> e.getKey(),
e -> e
.getValue()
.stream()
.filter(s -> !StringUtils.isBlank(s))
.map(Pattern::compile)
.collect(Collectors.toList())));
return config; return config;
} catch (IOException e) { } catch (IOException e) {
@ -131,7 +143,7 @@ public class DedupConfig implements Config, Serializable {
} }
@Override @Override
public Map<String, TreeNodeDef> decisionTree(){ public Map<String, TreeNodeDef> decisionTree() {
return getPace().getDecisionTree(); return getPace().getDecisionTree();
} }

View File

@ -1,19 +1,20 @@
package eu.dnetlib.pace.config; package eu.dnetlib.pace.config;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.ibm.icu.text.Transliterator; import com.ibm.icu.text.Transliterator;
import eu.dnetlib.pace.common.AbstractPaceFunctions; import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.model.ClusteringDef; import eu.dnetlib.pace.model.ClusteringDef;
import eu.dnetlib.pace.model.FieldDef; import eu.dnetlib.pace.model.FieldDef;
import eu.dnetlib.pace.tree.support.TreeNodeDef; import eu.dnetlib.pace.tree.support.TreeNodeDef;
import eu.dnetlib.pace.util.PaceResolver; import eu.dnetlib.pace.util.PaceResolver;
import java.io.Serializable;
import java.util.List;
import java.util.Map;
public class PaceConfig extends AbstractPaceFunctions implements Serializable { public class PaceConfig extends AbstractPaceFunctions implements Serializable {
private List<FieldDef> model; private List<FieldDef> model;
@ -33,7 +34,8 @@ public class PaceConfig extends AbstractPaceFunctions implements Serializable {
@JsonIgnore @JsonIgnore
public static PaceResolver resolver = new PaceResolver(); public static PaceResolver resolver = new PaceResolver();
public PaceConfig() {} public PaceConfig() {
}
public void initModel() { public void initModel() {
modelMap = Maps.newHashMap(); modelMap = Maps.newHashMap();
@ -42,20 +44,21 @@ public class PaceConfig extends AbstractPaceFunctions implements Serializable {
} }
} }
public void initTranslationMap(){ public void initTranslationMap() {
translationMap = Maps.newHashMap(); translationMap = Maps.newHashMap();
Transliterator transliterator = Transliterator.getInstance("Any-Eng"); Transliterator transliterator = Transliterator.getInstance("Any-Eng");
for (String key : synonyms.keySet()) { for (String key : synonyms.keySet()) {
for (String term : synonyms.get(key)){ for (String term : synonyms.get(key)) {
translationMap.put( translationMap
.put(
fixAliases(transliterator.transliterate(term.toLowerCase())), fixAliases(transliterator.transliterate(term.toLowerCase())),
key); key);
} }
} }
} }
public Map<String, String> translationMap(){ public Map<String, String> translationMap() {
return translationMap; return translationMap;
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.config; package eu.dnetlib.pace.config;
public enum Type { public enum Type {

View File

@ -1,10 +1,5 @@
package eu.dnetlib.pace.config;
import com.fasterxml.jackson.databind.ObjectMapper; package eu.dnetlib.pace.config;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.util.PaceException;
import org.apache.commons.lang3.StringUtils;
import java.io.IOException; import java.io.IOException;
import java.io.Serializable; import java.io.Serializable;
@ -12,6 +7,13 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.util.PaceException;
public class WfConfig implements Serializable { public class WfConfig implements Serializable {
@ -76,7 +78,6 @@ public class WfConfig implements Serializable {
/** Maximum number of allowed children. */ /** Maximum number of allowed children. */
private int maxChildren = MAX_CHILDREN; private int maxChildren = MAX_CHILDREN;
/** Default maximum number of iterations. */ /** Default maximum number of iterations. */
private final static int MAX_ITERATIONS = 20; private final static int MAX_ITERATIONS = 20;
@ -84,9 +85,10 @@ public class WfConfig implements Serializable {
private int maxIterations = MAX_ITERATIONS; private int maxIterations = MAX_ITERATIONS;
/** The Jquery path to retrieve the identifier */ /** The Jquery path to retrieve the identifier */
private String idPath = "$.id"; private String idPath = "$.id";
public WfConfig() {} public WfConfig() {
}
/** /**
* Instantiates a new dedup config. * Instantiates a new dedup config.
@ -114,8 +116,10 @@ public class WfConfig implements Serializable {
* @param idPath * @param idPath
* the path for the id of the entity * the path for the id of the entity
*/ */
public WfConfig(final String entityType, final String orderField, final List<String> rootBuilder, final String dedupRun, public WfConfig(final String entityType, final String orderField, final List<String> rootBuilder,
final Set<String> skipList, final int queueMaxSize, final int groupMaxSize, final int slidingWindowSize, final boolean includeChildren, final int maxIterations, final String idPath) { final String dedupRun,
final Set<String> skipList, final int queueMaxSize, final int groupMaxSize, final int slidingWindowSize,
final boolean includeChildren, final int maxIterations, final String idPath) {
super(); super();
this.entityType = entityType; this.entityType = entityType;
this.orderField = orderField; this.orderField = orderField;
@ -257,7 +261,6 @@ public class WfConfig implements Serializable {
this.maxChildren = maxChildren; this.maxChildren = maxChildren;
} }
public int getMaxIterations() { public int getMaxIterations() {
return maxIterations; return maxIterations;
} }
@ -277,7 +280,6 @@ public class WfConfig implements Serializable {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.lang.Object#toString() * @see java.lang.Object#toString()
*/ */
@Override @Override

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model; package eu.dnetlib.pace.model;
import eu.dnetlib.pace.config.Type; import eu.dnetlib.pace.config.Type;
@ -16,7 +17,8 @@ public abstract class AbstractField implements Field {
/** /**
* Instantiates a new abstract field. * Instantiates a new abstract field.
*/ */
protected AbstractField() {} protected AbstractField() {
}
/** /**
* Instantiates a new abstract field. * Instantiates a new abstract field.
@ -33,7 +35,6 @@ public abstract class AbstractField implements Field {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#getName() * @see eu.dnetlib.pace.model.Field#getName()
*/ */
@Override @Override
@ -43,7 +44,6 @@ public abstract class AbstractField implements Field {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#getType() * @see eu.dnetlib.pace.model.Field#getType()
*/ */
@Override @Override
@ -53,7 +53,6 @@ public abstract class AbstractField implements Field {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#setName(java.lang.String) * @see eu.dnetlib.pace.model.Field#setName(java.lang.String)
*/ */
@Override @Override
@ -63,7 +62,6 @@ public abstract class AbstractField implements Field {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#setType(eu.dnetlib.pace.config.Type) * @see eu.dnetlib.pace.model.Field#setType(eu.dnetlib.pace.config.Type)
*/ */
@Override @Override

View File

@ -1,15 +1,16 @@
package eu.dnetlib.pace.model;
import com.fasterxml.jackson.databind.ObjectMapper; package eu.dnetlib.pace.model;
import eu.dnetlib.pace.clustering.ClusteringFunction;
import eu.dnetlib.pace.config.PaceConfig;
import eu.dnetlib.pace.util.PaceException;
import java.io.IOException; import java.io.IOException;
import java.io.Serializable; import java.io.Serializable;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.clustering.ClusteringFunction;
import eu.dnetlib.pace.config.PaceConfig;
import eu.dnetlib.pace.util.PaceException;
public class ClusteringDef implements Serializable { public class ClusteringDef implements Serializable {
@ -19,7 +20,8 @@ public class ClusteringDef implements Serializable {
private Map<String, Integer> params; private Map<String, Integer> params;
public ClusteringDef() {} public ClusteringDef() {
}
public String getName() { public String getName() {
return name; return name;

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model; package eu.dnetlib.pace.model;
import java.util.Set; import java.util.Set;

View File

@ -1,9 +1,10 @@
package eu.dnetlib.pace.model; package eu.dnetlib.pace.model;
import eu.dnetlib.pace.config.Type;
import java.io.Serializable; import java.io.Serializable;
import eu.dnetlib.pace.config.Type;
/** /**
* The Interface Field. * The Interface Field.
*/ */

View File

@ -1,13 +1,15 @@
package eu.dnetlib.pace.model; package eu.dnetlib.pace.model;
import java.io.Serializable;
import java.util.List;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Splitter; import com.google.common.base.Splitter;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Type;
import java.io.Serializable; import eu.dnetlib.pace.config.Type;
import java.util.List;
/** /**
* The schema is composed by field definitions (FieldDef). Each field has a type, a name, and an associated compare algorithm. * The schema is composed by field definitions (FieldDef). Each field has a type, a name, and an associated compare algorithm.
@ -34,19 +36,20 @@ public class FieldDef implements Serializable {
*/ */
private int length = -1; private int length = -1;
public FieldDef() {} public FieldDef() {
}
// def apply(s: String): Field[A] // def apply(s: String): Field[A]
public Field apply(final Type type, final String s) { public Field apply(final Type type, final String s) {
switch (type) { switch (type) {
case Int: case Int:
return new FieldValueImpl(type, name, Integer.parseInt(s)); return new FieldValueImpl(type, name, Integer.parseInt(s));
case String: case String:
return new FieldValueImpl(type, name, s); return new FieldValueImpl(type, name, s);
case List: case List:
return new FieldListImpl(name, type); return new FieldListImpl(name, type);
default: default:
throw new IllegalArgumentException("Casting not implemented for type " + type); throw new IllegalArgumentException("Casting not implemented for type " + type);
} }
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model; package eu.dnetlib.pace.model;
import java.util.List; import java.util.List;

View File

@ -1,17 +1,19 @@
package eu.dnetlib.pace.model; package eu.dnetlib.pace.model;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function; import com.google.common.base.Function;
import com.google.common.base.Joiner; import com.google.common.base.Joiner;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import eu.dnetlib.pace.config.Type;
import java.util.Collection; import eu.dnetlib.pace.config.Type;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
/** /**
* The Class FieldListImpl. * The Class FieldListImpl.
@ -41,7 +43,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#add(java.lang.Object) * @see java.util.List#add(java.lang.Object)
*/ */
@Override @Override
@ -51,7 +52,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#add(int, java.lang.Object) * @see java.util.List#add(int, java.lang.Object)
*/ */
@Override @Override
@ -61,7 +61,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#addAll(java.util.Collection) * @see java.util.List#addAll(java.util.Collection)
*/ */
@Override @Override
@ -71,7 +70,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#addAll(int, java.util.Collection) * @see java.util.List#addAll(int, java.util.Collection)
*/ */
@Override @Override
@ -81,7 +79,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#clear() * @see java.util.List#clear()
*/ */
@Override @Override
@ -91,7 +88,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#contains(java.lang.Object) * @see java.util.List#contains(java.lang.Object)
*/ */
@Override @Override
@ -101,7 +97,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#containsAll(java.util.Collection) * @see java.util.List#containsAll(java.util.Collection)
*/ */
@Override @Override
@ -111,7 +106,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#get(int) * @see java.util.List#get(int)
*/ */
@Override @Override
@ -121,7 +115,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#indexOf(java.lang.Object) * @see java.util.List#indexOf(java.lang.Object)
*/ */
@Override @Override
@ -131,7 +124,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#isEmpty() * @see eu.dnetlib.pace.model.Field#isEmpty()
*/ */
@Override @Override
@ -141,7 +133,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.lang.Iterable#iterator() * @see java.lang.Iterable#iterator()
*/ */
@Override @Override
@ -151,7 +142,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#lastIndexOf(java.lang.Object) * @see java.util.List#lastIndexOf(java.lang.Object)
*/ */
@Override @Override
@ -161,7 +151,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#listIterator() * @see java.util.List#listIterator()
*/ */
@Override @Override
@ -171,7 +160,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#listIterator(int) * @see java.util.List#listIterator(int)
*/ */
@Override @Override
@ -181,7 +169,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#remove(java.lang.Object) * @see java.util.List#remove(java.lang.Object)
*/ */
@Override @Override
@ -191,7 +178,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#remove(int) * @see java.util.List#remove(int)
*/ */
@Override @Override
@ -201,7 +187,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#removeAll(java.util.Collection) * @see java.util.List#removeAll(java.util.Collection)
*/ */
@Override @Override
@ -211,7 +196,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#retainAll(java.util.Collection) * @see java.util.List#retainAll(java.util.Collection)
*/ */
@Override @Override
@ -221,7 +205,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#set(int, java.lang.Object) * @see java.util.List#set(int, java.lang.Object)
*/ */
@Override @Override
@ -231,7 +214,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#size() * @see java.util.List#size()
*/ */
@Override @Override
@ -241,7 +223,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#subList(int, int) * @see java.util.List#subList(int, int)
*/ */
@Override @Override
@ -251,7 +232,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#toArray() * @see java.util.List#toArray()
*/ */
@Override @Override
@ -261,7 +241,6 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.List#toArray(java.lang.Object[]) * @see java.util.List#toArray(java.lang.Object[])
*/ */
@Override @Override
@ -271,33 +250,31 @@ public class FieldListImpl extends AbstractField implements FieldList {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#stringValue() * @see eu.dnetlib.pace.model.Field#stringValue()
*/ */
@Override @Override
public String stringValue() { public String stringValue() {
switch (getType()) { switch (getType()) {
case List: case List:
case Int: case Int:
case String: case String:
return Joiner.on(" ").join(stringList()); return Joiner.on(" ").join(stringList());
case JSON: case JSON:
String json; String json;
try { try {
json = new ObjectMapper().writeValueAsString(this); json = new ObjectMapper().writeValueAsString(this);
} catch (JsonProcessingException e) { } catch (JsonProcessingException e) {
json = null; json = null;
} }
return json; return json;
default: default:
throw new IllegalArgumentException("Unknown type: " + getType().toString()); throw new IllegalArgumentException("Unknown type: " + getType().toString());
} }
} }
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.FieldList#stringList() * @see eu.dnetlib.pace.model.FieldList#stringList()
*/ */
@Override @Override
@ -317,10 +294,10 @@ public class FieldListImpl extends AbstractField implements FieldList {
@Override @Override
public double[] doubleArray() { public double[] doubleArray() {
return Lists.newArrayList(Iterables.transform(fields, getDouble())).stream().mapToDouble(d-> d).toArray(); return Lists.newArrayList(Iterables.transform(fields, getDouble())).stream().mapToDouble(d -> d).toArray();
} }
private Function<Field,Double> getDouble() { private Function<Field, Double> getDouble() {
return new Function<Field, Double>() { return new Function<Field, Double>() {
@Override @Override

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model; package eu.dnetlib.pace.model;
/** /**

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model; package eu.dnetlib.pace.model;
import java.net.MalformedURLException; import java.net.MalformedURLException;
@ -6,9 +7,10 @@ import java.util.Collections;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import eu.dnetlib.pace.config.Type;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import eu.dnetlib.pace.config.Type;
/** /**
* The Class FieldValueImpl. * The Class FieldValueImpl.
*/ */
@ -20,7 +22,8 @@ public class FieldValueImpl extends AbstractField implements FieldValue {
/** /**
* Instantiates a new field value impl. * Instantiates a new field value impl.
*/ */
public FieldValueImpl() {} public FieldValueImpl() {
}
/** /**
* Instantiates a new field value impl. * Instantiates a new field value impl.
@ -39,17 +42,17 @@ public class FieldValueImpl extends AbstractField implements FieldValue {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#isEmpty() * @see eu.dnetlib.pace.model.Field#isEmpty()
*/ */
@Override @Override
public boolean isEmpty() { public boolean isEmpty() {
if (value == null) return false; if (value == null)
return false;
switch (type) { switch (type) {
case String: case String:
case JSON: case JSON:
return value.toString().isEmpty(); return value.toString().isEmpty();
case List: case List:
try { try {
List<?> list = (List<?>) value; List<?> list = (List<?>) value;
@ -61,9 +64,9 @@ public class FieldValueImpl extends AbstractField implements FieldValue {
String str = value.toString(); String str = value.toString();
return StringUtils.isBlank(str) || !isValidURL(str); return StringUtils.isBlank(str) || !isValidURL(str);
case DoubleArray: case DoubleArray:
return doubleArrayValue().length==0; return doubleArrayValue().length == 0;
default: default:
return true; return true;
} }
} }
@ -78,7 +81,6 @@ public class FieldValueImpl extends AbstractField implements FieldValue {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.FieldValue#getValue() * @see eu.dnetlib.pace.model.FieldValue#getValue()
*/ */
@Override @Override
@ -88,7 +90,6 @@ public class FieldValueImpl extends AbstractField implements FieldValue {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.FieldValue#setValue(java.lang.Object) * @see eu.dnetlib.pace.model.FieldValue#setValue(java.lang.Object)
*/ */
@Override @Override
@ -98,7 +99,6 @@ public class FieldValueImpl extends AbstractField implements FieldValue {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.Field#stringValue() * @see eu.dnetlib.pace.model.Field#stringValue()
*/ */
@Override @Override
@ -119,12 +119,11 @@ public class FieldValueImpl extends AbstractField implements FieldValue {
} }
public double[] doubleArrayValue() { public double[] doubleArrayValue() {
return (double[])getValue(); return (double[]) getValue();
} }
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.lang.Iterable#iterator() * @see java.lang.Iterable#iterator()
*/ */
@Override @Override

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model; package eu.dnetlib.pace.model;
import java.io.Serializable; import java.io.Serializable;
@ -57,7 +58,6 @@ public class MapDocument implements Document, Serializable {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.document.Document#fields() * @see eu.dnetlib.pace.model.document.Document#fields()
*/ */
@Override @Override
@ -67,7 +67,6 @@ public class MapDocument implements Document, Serializable {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.document.Document#values(java.lang.String) * @see eu.dnetlib.pace.model.document.Document#values(java.lang.String)
*/ */
@Override @Override
@ -77,7 +76,6 @@ public class MapDocument implements Document, Serializable {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.document.Document#fieldNames() * @see eu.dnetlib.pace.model.document.Document#fieldNames()
*/ */
@Override @Override
@ -87,7 +85,6 @@ public class MapDocument implements Document, Serializable {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.lang.Object#toString() * @see java.lang.Object#toString()
*/ */
@Override @Override
@ -107,7 +104,6 @@ public class MapDocument implements Document, Serializable {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.model.document.Document#getIdentifier() * @see eu.dnetlib.pace.model.document.Document#getIdentifier()
*/ */
@Override @Override

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model; package eu.dnetlib.pace.model;
import java.util.Comparator; import java.util.Comparator;
@ -28,18 +29,19 @@ public class MapDocumentComparator implements Comparator<Document> {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see java.util.Comparator#compare(java.lang.Object, java.lang.Object) * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
*/ */
@Override @Override
public int compare(final Document d1, final Document d2) { public int compare(final Document d1, final Document d2) {
if (d1.values(comparatorField).isEmpty() || d2.values(comparatorField).isEmpty()) return 0; if (d1.values(comparatorField).isEmpty() || d2.values(comparatorField).isEmpty())
return 0;
final String o1 = Iterables.getFirst(d1.values(comparatorField), emptyField).stringValue(); final String o1 = Iterables.getFirst(d1.values(comparatorField), emptyField).stringValue();
final String o2 = Iterables.getFirst(d2.values(comparatorField), emptyField).stringValue(); final String o2 = Iterables.getFirst(d2.values(comparatorField), emptyField).stringValue();
if ((o1 == null) || (o2 == null)) return 0; if ((o1 == null) || (o2 == null))
return 0;
final String to1 = NGramUtils.cleanupForOrdering(o1); final String to1 = NGramUtils.cleanupForOrdering(o1);
final String to2 = NGramUtils.cleanupForOrdering(o2); final String to2 = NGramUtils.cleanupForOrdering(o2);

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model; package eu.dnetlib.pace.model;
import java.lang.reflect.Type; import java.lang.reflect.Type;
@ -33,7 +34,8 @@ public class MapDocumentSerializer implements InstanceCreator<MapDocument> {
gson.registerTypeAdapter(Field.class, new JsonDeserializer<Field>() { gson.registerTypeAdapter(Field.class, new JsonDeserializer<Field>() {
@Override @Override
public Field deserialize(final JsonElement json, final Type typeOfT, final JsonDeserializationContext context) throws JsonParseException { public Field deserialize(final JsonElement json, final Type typeOfT,
final JsonDeserializationContext context) throws JsonParseException {
final FieldListImpl fl = new FieldListImpl(); final FieldListImpl fl = new FieldListImpl();
if (json.isJsonObject()) { if (json.isJsonObject()) {

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model; package eu.dnetlib.pace.model;
import java.nio.charset.Charset; import java.nio.charset.Charset;
@ -43,7 +44,7 @@ public class Person {
// s = s.replaceAll("[\\W&&[^,-]]", ""); // s = s.replaceAll("[\\W&&[^,-]]", "");
} }
if (s.contains(",")) { //if the name contains a comma it is easy derivable the name and the surname if (s.contains(",")) { // if the name contains a comma it is easy derivable the name and the surname
final String[] arr = s.split(","); final String[] arr = s.split(",");
if (arr.length == 1) { if (arr.length == 1) {
fullname = splitTerms(arr[0]); fullname = splitTerms(arr[0]);

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.model; package eu.dnetlib.pace.model;
import java.util.ArrayList; import java.util.ArrayList;
@ -57,7 +58,7 @@ public class PersonComparatorUtils {
private static boolean verifyNames(List<String> list1, List<String> list2) { private static boolean verifyNames(List<String> list1, List<String> list2) {
return verifySimilarity(extractExtendedNames(list1), extractExtendedNames(list2)) return verifySimilarity(extractExtendedNames(list1), extractExtendedNames(list2))
&& verifySimilarity(extractInitials(list1), extractInitials(list2)); && verifySimilarity(extractInitials(list1), extractInitials(list2));
} }
private static boolean verifySurnames(List<String> list1, List<String> list2) { private static boolean verifySurnames(List<String> list1, List<String> list2) {
@ -76,7 +77,7 @@ public class PersonComparatorUtils {
Collections.sort(list1); Collections.sort(list1);
Collections.sort(list2); Collections.sort(list2);
return verifySimilarity(extractExtendedNames(list1), extractExtendedNames(list2)) return verifySimilarity(extractExtendedNames(list1), extractExtendedNames(list2))
&& verifySimilarity(extractInitials(list1), extractInitials(list2)); && verifySimilarity(extractInitials(list1), extractInitials(list2));
} }
private static List<String> extractExtendedNames(List<String> list) { private static List<String> extractExtendedNames(List<String> list) {
@ -107,7 +108,7 @@ public class PersonComparatorUtils {
for (String s : list1) { for (String s : list1) {
int curr = list2.indexOf(s); int curr = list2.indexOf(s);
if (curr > pos) { if (curr > pos) {
list2.set(curr, "*"); // I invalidate the found element, example: "amm - amm" list2.set(curr, "*"); // I invalidate the found element, example: "amm - amm"
pos = curr; pos = curr;
} else { } else {
return false; return false;

View File

@ -1,42 +1,43 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field; import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("alwaysMatch") @ComparatorClass("alwaysMatch")
public class AlwaysMatch extends AbstractComparator { public class AlwaysMatch extends AbstractComparator {
public AlwaysMatch(final Map<String, String> params){ public AlwaysMatch(final Map<String, String> params) {
super(params, new com.wcohen.ss.JaroWinkler()); super(params, new com.wcohen.ss.JaroWinkler());
} }
public AlwaysMatch(final double weight) { public AlwaysMatch(final double weight) {
super(weight, new com.wcohen.ss.JaroWinkler()); super(weight, new com.wcohen.ss.JaroWinkler());
} }
protected AlwaysMatch(final double weight, final AbstractStringDistance ssalgo) { protected AlwaysMatch(final double weight, final AbstractStringDistance ssalgo) {
super(weight, ssalgo); super(weight, ssalgo);
} }
@Override @Override
public double compare(final Field a, final Field b, final Config conf) { public double compare(final Field a, final Field b, final Config conf) {
return 1.0; return 1.0;
} }
@Override @Override
public double getWeight() { public double getWeight() {
return super.weight; return super.weight;
} }
@Override @Override
protected double normalize(final double d) { protected double normalize(final double d) {
return d; return d;
} }
} }

View File

@ -1,13 +1,5 @@
package eu.dnetlib.pace.tree;
import com.google.common.collect.Iterables; package eu.dnetlib.pace.tree;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.model.Person;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import com.wcohen.ss.AbstractStringDistance;
import java.util.Comparator; import java.util.Comparator;
import java.util.List; import java.util.List;
@ -16,139 +8,164 @@ import java.util.function.Function;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
import com.google.common.collect.Iterables;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.model.Person;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("authorsMatch") @ComparatorClass("authorsMatch")
public class AuthorsMatch extends AbstractComparator { public class AuthorsMatch extends AbstractComparator {
Map<String, String> params; Map<String, String> params;
private double SURNAME_THRESHOLD; private double SURNAME_THRESHOLD;
private double NAME_THRESHOLD; private double NAME_THRESHOLD;
private double FULLNAME_THRESHOLD; private double FULLNAME_THRESHOLD;
private String MODE; //full or surname private String MODE; // full or surname
private int SIZE_THRESHOLD; private int SIZE_THRESHOLD;
private String TYPE; //count or percentage private String TYPE; // count or percentage
private int common; private int common;
public AuthorsMatch(Map<String, String> params){ public AuthorsMatch(Map<String, String> params) {
super(params, new com.wcohen.ss.JaroWinkler()); super(params, new com.wcohen.ss.JaroWinkler());
this.params = params; this.params = params;
MODE = params.getOrDefault("mode", "full"); MODE = params.getOrDefault("mode", "full");
SURNAME_THRESHOLD = Double.parseDouble(params.getOrDefault("surname_th", "0.95")); SURNAME_THRESHOLD = Double.parseDouble(params.getOrDefault("surname_th", "0.95"));
NAME_THRESHOLD = Double.parseDouble(params.getOrDefault("name_th", "0.95")); NAME_THRESHOLD = Double.parseDouble(params.getOrDefault("name_th", "0.95"));
FULLNAME_THRESHOLD = Double.parseDouble(params.getOrDefault("fullname_th", "0.9")); FULLNAME_THRESHOLD = Double.parseDouble(params.getOrDefault("fullname_th", "0.9"));
SIZE_THRESHOLD = Integer.parseInt(params.getOrDefault("size_th", "20")); SIZE_THRESHOLD = Integer.parseInt(params.getOrDefault("size_th", "20"));
TYPE = params.getOrDefault("type", "percentage"); TYPE = params.getOrDefault("type", "percentage");
common = 0; common = 0;
} }
protected AuthorsMatch(double w, AbstractStringDistance ssalgo) { protected AuthorsMatch(double w, AbstractStringDistance ssalgo) {
super(w, ssalgo); super(w, ssalgo);
} }
@Override @Override
public double compare(final Field a, final Field b, final Config conf) { public double compare(final Field a, final Field b, final Config conf) {
if (a.isEmpty() || b.isEmpty()) if (a.isEmpty() || b.isEmpty())
return -1; return -1;
if (((FieldList) a).size() > SIZE_THRESHOLD || ((FieldList) b).size() > SIZE_THRESHOLD) if (((FieldList) a).size() > SIZE_THRESHOLD || ((FieldList) b).size() > SIZE_THRESHOLD)
return 1.0; return 1.0;
List<Person> aList = ((FieldList) a).stringList().stream().map(author -> new Person(author, false)).collect(Collectors.toList()); List<Person> aList = ((FieldList) a)
List<Person> bList = ((FieldList) b).stringList().stream().map(author -> new Person(author, false)).collect(Collectors.toList()); .stringList()
.stream()
.map(author -> new Person(author, false))
.collect(Collectors.toList());
List<Person> bList = ((FieldList) b)
.stringList()
.stream()
.map(author -> new Person(author, false))
.collect(Collectors.toList());
common = 0; common = 0;
//compare each element of List1 with each element of List2 // compare each element of List1 with each element of List2
for (Person p1 : aList) for (Person p1 : aList)
for (Person p2 : bList) { for (Person p2 : bList) {
//both persons are inaccurate // both persons are inaccurate
if (!p1.isAccurate() && !p2.isAccurate()) { if (!p1.isAccurate() && !p2.isAccurate()) {
//compare just normalized fullnames // compare just normalized fullnames
String fullname1 = normalization(p1.getNormalisedFullname().isEmpty()? p1.getOriginal() : p1.getNormalisedFullname()); String fullname1 = normalization(
String fullname2 = normalization(p2.getNormalisedFullname().isEmpty()? p2.getOriginal() : p2.getNormalisedFullname()); p1.getNormalisedFullname().isEmpty() ? p1.getOriginal() : p1.getNormalisedFullname());
String fullname2 = normalization(
p2.getNormalisedFullname().isEmpty() ? p2.getOriginal() : p2.getNormalisedFullname());
if (ssalgo.score(fullname1, fullname2) > FULLNAME_THRESHOLD) { if (ssalgo.score(fullname1, fullname2) > FULLNAME_THRESHOLD) {
common += 1; common += 1;
break; break;
} }
} }
//one person is inaccurate // one person is inaccurate
if (p1.isAccurate() ^ p2.isAccurate()) { if (p1.isAccurate() ^ p2.isAccurate()) {
//prepare data // prepare data
//data for the accurate person // data for the accurate person
String name = normalization(p1.isAccurate()? p1.getNormalisedFirstName() : p2.getNormalisedFirstName()); String name = normalization(
String surname = normalization(p1.isAccurate()? p1.getNormalisedSurname() : p2.getNormalisedSurname()); p1.isAccurate() ? p1.getNormalisedFirstName() : p2.getNormalisedFirstName());
String surname = normalization(
p1.isAccurate() ? p1.getNormalisedSurname() : p2.getNormalisedSurname());
//data for the inaccurate person // data for the inaccurate person
String fullname = normalization( String fullname = normalization(
p1.isAccurate() ? ((p2.getNormalisedFullname().isEmpty()) ? p2.getOriginal() : p2.getNormalisedFullname()) : (p1.getNormalisedFullname().isEmpty() ? p1.getOriginal() : p1.getNormalisedFullname()) p1.isAccurate()
); ? ((p2.getNormalisedFullname().isEmpty()) ? p2.getOriginal() : p2.getNormalisedFullname())
: (p1.getNormalisedFullname().isEmpty() ? p1.getOriginal() : p1.getNormalisedFullname()));
if (fullname.contains(surname)) { if (fullname.contains(surname)) {
if (MODE.equals("full")) { if (MODE.equals("full")) {
if (fullname.contains(name)) { if (fullname.contains(name)) {
common += 1; common += 1;
break; break;
} }
} } else { // MODE equals "surname"
else { //MODE equals "surname" common += 1;
common += 1; break;
break; }
} }
} }
}
//both persons are accurate // both persons are accurate
if (p1.isAccurate() && p2.isAccurate()) { if (p1.isAccurate() && p2.isAccurate()) {
if (compareSurname(p1, p2)) { if (compareSurname(p1, p2)) {
if (MODE.equals("full")) { if (MODE.equals("full")) {
if(compareFirstname(p1, p2)) { if (compareFirstname(p1, p2)) {
common += 1; common += 1;
break; break;
} }
} } else { // MODE equals "surname"
else { //MODE equals "surname" common += 1;
common += 1; break;
break; }
} }
}
} }
} }
//normalization factor to compute the score // normalization factor to compute the score
int normFactor = aList.size() == bList.size() ? aList.size() : (aList.size() + bList.size() - common); int normFactor = aList.size() == bList.size() ? aList.size() : (aList.size() + bList.size() - common);
if(TYPE.equals("percentage")) { if (TYPE.equals("percentage")) {
return (double) common / normFactor; return (double) common / normFactor;
} } else {
else { return (double) common;
return (double) common; }
} }
}
public boolean compareSurname(Person p1, Person p2) { public boolean compareSurname(Person p1, Person p2) {
return ssalgo.score(normalization(p1.getNormalisedSurname()), normalization(p2.getNormalisedSurname())) > SURNAME_THRESHOLD; return ssalgo
} .score(
normalization(p1.getNormalisedSurname()), normalization(p2.getNormalisedSurname())) > SURNAME_THRESHOLD;
}
public boolean compareFirstname(Person p1, Person p2) { public boolean compareFirstname(Person p1, Person p2) {
if(p1.getNormalisedFirstName().length()<=2 || p2.getNormalisedFirstName().length()<=2) { if (p1.getNormalisedFirstName().length() <= 2 || p2.getNormalisedFirstName().length() <= 2) {
if (firstLC(p1.getNormalisedFirstName()).equals(firstLC(p2.getNormalisedFirstName()))) if (firstLC(p1.getNormalisedFirstName()).equals(firstLC(p2.getNormalisedFirstName())))
return true; return true;
} }
return ssalgo.score(normalization(p1.getNormalisedFirstName()), normalization(p2.getNormalisedFirstName())) > NAME_THRESHOLD; return ssalgo
} .score(
normalization(p1.getNormalisedFirstName()),
normalization(p2.getNormalisedFirstName())) > NAME_THRESHOLD;
}
public String normalization(String s) { public String normalization(String s) {
return normalize(utf8(cleanup(s))); return normalize(utf8(cleanup(s)));
} }
} }

View File

@ -1,47 +1,48 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import java.util.Set;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
import java.util.Set;
@ComparatorClass("cityMatch") @ComparatorClass("cityMatch")
public class CityMatch extends AbstractComparator { public class CityMatch extends AbstractComparator {
private Map<String, String> params; private Map<String, String> params;
public CityMatch(Map<String, String> params) { public CityMatch(Map<String, String> params) {
super(params); super(params);
this.params = params; this.params = params;
} }
@Override @Override
public double distance(final String a, final String b, final Config conf) { public double distance(final String a, final String b, final Config conf) {
String ca = cleanup(a); String ca = cleanup(a);
String cb = cleanup(b); String cb = cleanup(b);
ca = normalize(ca); ca = normalize(ca);
cb = normalize(cb); cb = normalize(cb);
ca = filterAllStopWords(ca); ca = filterAllStopWords(ca);
cb = filterAllStopWords(cb); cb = filterAllStopWords(cb);
Set<String> cities1 = getCities(ca, Integer.parseInt(params.getOrDefault("windowSize", "4"))); Set<String> cities1 = getCities(ca, Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> cities2 = getCities(cb, Integer.parseInt(params.getOrDefault("windowSize", "4"))); Set<String> cities2 = getCities(cb, Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> codes1 = citiesToCodes(cities1); Set<String> codes1 = citiesToCodes(cities1);
Set<String> codes2 = citiesToCodes(cities2); Set<String> codes2 = citiesToCodes(cities2);
//if no cities are detected, the comparator gives 1.0 // if no cities are detected, the comparator gives 1.0
if (codes1.isEmpty() && codes2.isEmpty()) if (codes1.isEmpty() && codes2.isEmpty())
return 1.0; return 1.0;
else { else {
if (codes1.isEmpty() ^ codes2.isEmpty()) if (codes1.isEmpty() ^ codes2.isEmpty())
return -1; //undefined if one of the two has no cities return -1; // undefined if one of the two has no cities
return commonElementsPercentage(codes1, codes2); return commonElementsPercentage(codes1, codes2);
} }
} }
} }

View File

@ -1,5 +1,11 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field; import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList; import eu.dnetlib.pace.model.FieldList;
@ -8,46 +14,40 @@ import eu.dnetlib.pace.model.Person;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@ComparatorClass("cosineSimilarity") @ComparatorClass("cosineSimilarity")
public class CosineSimilarity extends AbstractComparator { public class CosineSimilarity extends AbstractComparator {
Map<String, String> params; Map<String, String> params;
public CosineSimilarity(Map<String,String> params) { public CosineSimilarity(Map<String, String> params) {
super(params); super(params);
} }
@Override @Override
public double compare(final Field a, final Field b, final Config conf) { public double compare(final Field a, final Field b, final Config conf) {
if (a.isEmpty() || b.isEmpty()) if (a.isEmpty() || b.isEmpty())
return -1; return -1;
double[] aVector = ((FieldValueImpl) a).doubleArrayValue(); double[] aVector = ((FieldValueImpl) a).doubleArrayValue();
double[] bVector = ((FieldValueImpl) b).doubleArrayValue(); double[] bVector = ((FieldValueImpl) b).doubleArrayValue();
return cosineSimilarity(aVector, bVector); return cosineSimilarity(aVector, bVector);
} }
double cosineSimilarity(double[] a, double[] b) { double cosineSimilarity(double[] a, double[] b) {
double dotProduct = 0; double dotProduct = 0;
double normASum = 0; double normASum = 0;
double normBSum = 0; double normBSum = 0;
for(int i = 0; i < a.length; i ++) { for (int i = 0; i < a.length; i++) {
dotProduct += a[i] * b[i]; dotProduct += a[i] * b[i];
normASum += a[i] * a[i]; normASum += a[i] * a[i];
normBSum += b[i] * b[i]; normBSum += b[i] * b[i];
} }
double eucledianDist = Math.sqrt(normASum) * Math.sqrt(normBSum);
return dotProduct / eucledianDist;
}
double eucledianDist = Math.sqrt(normASum) * Math.sqrt(normBSum);
return dotProduct / eucledianDist;
}
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map; import java.util.Map;
@ -13,15 +14,15 @@ import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("doiExactMatch") @ComparatorClass("doiExactMatch")
public class DoiExactMatch extends ExactMatchIgnoreCase { public class DoiExactMatch extends ExactMatchIgnoreCase {
public final String PREFIX = "(http:\\/\\/dx\\.doi\\.org\\/)|(doi:)"; public final String PREFIX = "(http:\\/\\/dx\\.doi\\.org\\/)|(doi:)";
public DoiExactMatch(final Map<String, String> params) { public DoiExactMatch(final Map<String, String> params) {
super(params); super(params);
} }
@Override @Override
protected String getValue(final Field f) { protected String getValue(final Field f) {
return super.getValue(f).replaceAll(PREFIX, ""); return super.getValue(f).replaceAll(PREFIX, "");
} }
} }

View File

@ -1,30 +1,31 @@
package eu.dnetlib.pace.tree;
import eu.dnetlib.pace.model.Field; package eu.dnetlib.pace.tree;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.net.URL; import java.net.URL;
import java.util.Map; import java.util.Map;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("domainExactMatch") @ComparatorClass("domainExactMatch")
public class DomainExactMatch extends ExactMatchIgnoreCase { public class DomainExactMatch extends ExactMatchIgnoreCase {
public DomainExactMatch(final Map<String, String> params) { public DomainExactMatch(final Map<String, String> params) {
super(params); super(params);
} }
@Override @Override
protected String getValue(final Field f) { protected String getValue(final Field f) {
try { try {
return asUrl(super.getValue(f)).getHost(); return asUrl(super.getValue(f)).getHost();
} catch (MalformedURLException e) { } catch (MalformedURLException e) {
return ""; return "";
} }
} }
private URL asUrl(final String value) throws MalformedURLException { private URL asUrl(final String value) throws MalformedURLException {
return new URL(value); return new URL(value);
} }
} }

View File

@ -1,42 +1,44 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("exactMatch") @ComparatorClass("exactMatch")
public class ExactMatch extends AbstractComparator { public class ExactMatch extends AbstractComparator {
public ExactMatch(Map<String, String> params){ public ExactMatch(Map<String, String> params) {
super(params, new com.wcohen.ss.JaroWinkler()); super(params, new com.wcohen.ss.JaroWinkler());
} }
public ExactMatch(final double weight) { public ExactMatch(final double weight) {
super(weight, new com.wcohen.ss.JaroWinkler()); super(weight, new com.wcohen.ss.JaroWinkler());
} }
protected ExactMatch(final double weight, final AbstractStringDistance ssalgo) { protected ExactMatch(final double weight, final AbstractStringDistance ssalgo) {
super(weight, ssalgo); super(weight, ssalgo);
} }
@Override @Override
public double distance(final String a, final String b, final Config conf) { public double distance(final String a, final String b, final Config conf) {
if (a.isEmpty() || b.isEmpty()) { if (a.isEmpty() || b.isEmpty()) {
return -1.0; //return -1 if a field is missing return -1.0; // return -1 if a field is missing
} }
return a.equals(b) ? 1.0 : 0; return a.equals(b) ? 1.0 : 0;
} }
@Override @Override
public double getWeight() { public double getWeight() {
return super.weight; return super.weight;
} }
@Override @Override
protected double normalize(final double d) { protected double normalize(final double d) {
return d; return d;
} }
} }

View File

@ -1,32 +1,33 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field; import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("exactMatchIgnoreCase") @ComparatorClass("exactMatchIgnoreCase")
public class ExactMatchIgnoreCase extends AbstractComparator { public class ExactMatchIgnoreCase extends AbstractComparator {
public ExactMatchIgnoreCase(Map<String, String> params) { public ExactMatchIgnoreCase(Map<String, String> params) {
super(params); super(params);
} }
@Override @Override
public double compare(Field a, Field b, final Config conf) { public double compare(Field a, Field b, final Config conf) {
final String fa = getValue(a); final String fa = getValue(a);
final String fb = getValue(b); final String fb = getValue(b);
if (fa.isEmpty() || fb.isEmpty()) if (fa.isEmpty() || fb.isEmpty())
return -1; return -1;
return fa.equalsIgnoreCase(fb) ? 1 : 0; return fa.equalsIgnoreCase(fb) ? 1 : 0;
} }
protected String getValue(final Field f) { protected String getValue(final Field f) {
return getFirstValue(f); return getFirstValue(f);
} }
} }

View File

@ -1,11 +1,5 @@
package eu.dnetlib.pace.tree;
import com.google.common.collect.Sets; package eu.dnetlib.pace.tree;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
@ -13,72 +7,79 @@ import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("instanceTypeMatch") @ComparatorClass("instanceTypeMatch")
public class InstanceTypeMatch extends AbstractComparator { public class InstanceTypeMatch extends AbstractComparator {
final Map<String, String> translationMap = new HashMap<>(); final Map<String, String> translationMap = new HashMap<>();
public InstanceTypeMatch(Map<String, String> params){ public InstanceTypeMatch(Map<String, String> params) {
super(params); super(params);
//jolly types // jolly types
translationMap.put("Conference object", "*"); translationMap.put("Conference object", "*");
translationMap.put("Other literature type", "*"); translationMap.put("Other literature type", "*");
translationMap.put("Unknown", "*"); translationMap.put("Unknown", "*");
//article types // article types
translationMap.put("Article", "Article"); translationMap.put("Article", "Article");
translationMap.put("Data Paper", "Article"); translationMap.put("Data Paper", "Article");
translationMap.put("Software Paper", "Article"); translationMap.put("Software Paper", "Article");
translationMap.put("Preprint", "Article"); translationMap.put("Preprint", "Article");
//thesis types // thesis types
translationMap.put("Thesis", "Thesis"); translationMap.put("Thesis", "Thesis");
translationMap.put("Master thesis", "Thesis"); translationMap.put("Master thesis", "Thesis");
translationMap.put("Bachelor thesis", "Thesis"); translationMap.put("Bachelor thesis", "Thesis");
translationMap.put("Doctoral thesis", "Thesis"); translationMap.put("Doctoral thesis", "Thesis");
} }
@Override
public double compare(final Field a, final Field b, final Config conf) {
@Override if (a == null || b == null) {
public double compare(final Field a, final Field b, final Config conf) { return -1;
}
if (a == null || b == null) { final List<String> sa = ((FieldList) a).stringList();
return -1; final List<String> sb = ((FieldList) b).stringList();
}
final List<String> sa = ((FieldList) a).stringList(); if (sa.isEmpty() || sb.isEmpty()) {
final List<String> sb = ((FieldList) b).stringList(); return -1;
}
if (sa.isEmpty() || sb.isEmpty()) { final Set<String> ca = sa.stream().map(this::translate).collect(Collectors.toSet());
return -1; final Set<String> cb = sb.stream().map(this::translate).collect(Collectors.toSet());
}
final Set<String> ca = sa.stream().map(this::translate).collect(Collectors.toSet()); // if at least one is a jolly type, it must produce a match
final Set<String> cb = sb.stream().map(this::translate).collect(Collectors.toSet()); if (ca.contains("*") || cb.contains("*"))
return 1.0;
//if at least one is a jolly type, it must produce a match int incommon = Sets.intersection(ca, cb).size();
if (ca.contains("*") || cb.contains("*"))
return 1.0;
int incommon = Sets.intersection(ca, cb).size(); // if at least one is in common, it must produce a match
return incommon >= 1 ? 1 : 0;
}
//if at least one is in common, it must produce a match public String translate(String term) {
return incommon >= 1 ? 1 : 0; return translationMap.getOrDefault(term, term);
} }
public String translate(String term){ @Override
return translationMap.getOrDefault(term, term); public double getWeight() {
} return super.weight;
}
@Override @Override
public double getWeight() { protected double normalize(final double d) {
return super.weight; return d;
} }
@Override
protected double normalize(final double d) {
return d;
}
} }

View File

@ -1,44 +1,46 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
//case class JaroWinkler(w: Double) extends SecondStringDistanceAlgo(w, new com.wcohen.ss.JaroWinkler()) //case class JaroWinkler(w: Double) extends SecondStringDistanceAlgo(w, new com.wcohen.ss.JaroWinkler())
@ComparatorClass("jaroWinkler") @ComparatorClass("jaroWinkler")
public class JaroWinkler extends AbstractComparator { public class JaroWinkler extends AbstractComparator {
public JaroWinkler(Map<String, String> params){ public JaroWinkler(Map<String, String> params) {
super(params, new com.wcohen.ss.JaroWinkler()); super(params, new com.wcohen.ss.JaroWinkler());
} }
public JaroWinkler(double weight) { public JaroWinkler(double weight) {
super(weight, new com.wcohen.ss.JaroWinkler()); super(weight, new com.wcohen.ss.JaroWinkler());
} }
protected JaroWinkler(double weight, AbstractStringDistance ssalgo) { protected JaroWinkler(double weight, AbstractStringDistance ssalgo) {
super(weight, ssalgo); super(weight, ssalgo);
} }
@Override @Override
public double distance(String a, String b, final Config conf) { public double distance(String a, String b, final Config conf) {
String ca = cleanup(a); String ca = cleanup(a);
String cb = cleanup(b); String cb = cleanup(b);
return normalize(ssalgo.score(ca, cb)); return normalize(ssalgo.score(ca, cb));
} }
@Override @Override
public double getWeight() { public double getWeight() {
return super.weight; return super.weight;
} }
@Override @Override
protected double normalize(double d) { protected double normalize(double d) {
return d; return d;
} }
} }

View File

@ -1,72 +1,74 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.config.Config;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("jaroWinklerNormalizedName") @ComparatorClass("jaroWinklerNormalizedName")
public class JaroWinklerNormalizedName extends AbstractComparator { public class JaroWinklerNormalizedName extends AbstractComparator {
private Map<String, String> params; private Map<String, String> params;
public JaroWinklerNormalizedName(Map<String, String> params){ public JaroWinklerNormalizedName(Map<String, String> params) {
super(params, new com.wcohen.ss.JaroWinkler()); super(params, new com.wcohen.ss.JaroWinkler());
this.params = params; this.params = params;
} }
public JaroWinklerNormalizedName(double weight) { public JaroWinklerNormalizedName(double weight) {
super(weight, new com.wcohen.ss.JaroWinkler()); super(weight, new com.wcohen.ss.JaroWinkler());
} }
protected JaroWinklerNormalizedName(double weight, AbstractStringDistance ssalgo) { protected JaroWinklerNormalizedName(double weight, AbstractStringDistance ssalgo) {
super(weight, ssalgo); super(weight, ssalgo);
} }
@Override @Override
public double distance(String a, String b, final Config conf) { public double distance(String a, String b, final Config conf) {
String ca = cleanup(a); String ca = cleanup(a);
String cb = cleanup(b); String cb = cleanup(b);
ca = normalize(ca); ca = normalize(ca);
cb = normalize(cb); cb = normalize(cb);
ca = filterAllStopWords(ca); ca = filterAllStopWords(ca);
cb = filterAllStopWords(cb); cb = filterAllStopWords(cb);
Set<String> keywords1 = getKeywords(ca, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4"))); Set<String> keywords1 = getKeywords(
Set<String> keywords2 = getKeywords(cb, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4"))); ca, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> keywords2 = getKeywords(
cb, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> cities1 = getCities(ca, Integer.parseInt(params.getOrDefault("windowSize", "4"))); Set<String> cities1 = getCities(ca, Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> cities2 = getCities(cb, Integer.parseInt(params.getOrDefault("windowSize", "4"))); Set<String> cities2 = getCities(cb, Integer.parseInt(params.getOrDefault("windowSize", "4")));
ca = removeKeywords(ca, keywords1); ca = removeKeywords(ca, keywords1);
ca = removeKeywords(ca, cities1); ca = removeKeywords(ca, cities1);
cb = removeKeywords(cb, keywords2); cb = removeKeywords(cb, keywords2);
cb = removeKeywords(cb, cities2); cb = removeKeywords(cb, cities2);
ca = ca.replaceAll("[ ]{2,}", " "); ca = ca.replaceAll("[ ]{2,}", " ");
cb = cb.replaceAll("[ ]{2,}", " "); cb = cb.replaceAll("[ ]{2,}", " ");
if (ca.isEmpty() && cb.isEmpty()) if (ca.isEmpty() && cb.isEmpty())
return 1.0; return 1.0;
else else
return normalize(ssalgo.score(ca,cb)); return normalize(ssalgo.score(ca, cb));
} }
@Override @Override
public double getWeight() { public double getWeight() {
return super.weight; return super.weight;
} }
@Override @Override
protected double normalize(double d) { protected double normalize(double d) {
return d; return d;
} }
} }

View File

@ -1,18 +1,19 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import java.util.Map; import eu.dnetlib.pace.tree.support.ComparatorClass;
//case class JaroWinkler(w: Double) extends SecondStringDistanceAlgo(w, new com.wcohen.ss.JaroWinkler()) //case class JaroWinkler(w: Double) extends SecondStringDistanceAlgo(w, new com.wcohen.ss.JaroWinkler())
@ComparatorClass("jaroWinklerTitle") @ComparatorClass("jaroWinklerTitle")
public class JaroWinklerTitle extends AbstractComparator { public class JaroWinklerTitle extends AbstractComparator {
public JaroWinklerTitle(Map<String, String> params){ public JaroWinklerTitle(Map<String, String> params) {
super(params, new com.wcohen.ss.JaroWinkler()); super(params, new com.wcohen.ss.JaroWinkler());
} }
@ -23,7 +24,7 @@ public class JaroWinklerTitle extends AbstractComparator {
protected JaroWinklerTitle(double weight, AbstractStringDistance ssalgo) { protected JaroWinklerTitle(double weight, AbstractStringDistance ssalgo) {
super(weight, ssalgo); super(weight, ssalgo);
} }
@Override @Override
public double distance(String a, String b, final Config conf) { public double distance(String a, String b, final Config conf) {
String ca = cleanup(a); String ca = cleanup(a);
@ -31,7 +32,7 @@ public class JaroWinklerTitle extends AbstractComparator {
boolean check = checkNumbers(ca, cb); boolean check = checkNumbers(ca, cb);
return check ? 0.5 : normalize(ssalgo.score(ca, cb)); return check ? 0.5 : normalize(ssalgo.score(ca, cb));
} }
@Override @Override
public double getWeight() { public double getWeight() {

View File

@ -1,77 +1,81 @@
package eu.dnetlib.pace.tree;
import com.google.common.collect.Sets; package eu.dnetlib.pace.tree;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.util.MapDocumentUtil;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.util.MapDocumentUtil;
@ComparatorClass("jsonListMatch") @ComparatorClass("jsonListMatch")
public class JsonListMatch extends AbstractComparator { public class JsonListMatch extends AbstractComparator {
private static final Log log = LogFactory.getLog(JsonListMatch.class); private static final Log log = LogFactory.getLog(JsonListMatch.class);
private Map<String, String> params; private Map<String, String> params;
private String MODE; //"percentage" or "count" private String MODE; // "percentage" or "count"
public JsonListMatch(final Map<String, String> params) { public JsonListMatch(final Map<String, String> params) {
super(params); super(params);
this.params = params; this.params = params;
MODE = params.getOrDefault("mode", "percentage"); MODE = params.getOrDefault("mode", "percentage");
} }
@Override @Override
public double compare(final Field a, final Field b, final Config conf) { public double compare(final Field a, final Field b, final Config conf) {
final List<String> sa = ((FieldList) a).stringList(); final List<String> sa = ((FieldList) a).stringList();
final List<String> sb = ((FieldList) b).stringList(); final List<String> sb = ((FieldList) b).stringList();
if (sa.isEmpty() || sb.isEmpty()) { if (sa.isEmpty() || sb.isEmpty()) {
return -1; return -1;
} }
final Set<String> ca = sa.stream().map(this::toComparableString).collect(Collectors.toSet()); final Set<String> ca = sa.stream().map(this::toComparableString).collect(Collectors.toSet());
final Set<String> cb = sb.stream().map(this::toComparableString).collect(Collectors.toSet()); final Set<String> cb = sb.stream().map(this::toComparableString).collect(Collectors.toSet());
int incommon = Sets.intersection(ca, cb).size(); int incommon = Sets.intersection(ca, cb).size();
int simDiff = Sets.symmetricDifference(ca, cb).size(); int simDiff = Sets.symmetricDifference(ca, cb).size();
if (incommon + simDiff == 0) { if (incommon + simDiff == 0) {
return 0.0; return 0.0;
} }
if (MODE.equals("percentage")) if (MODE.equals("percentage"))
return (double)incommon / (incommon + simDiff); return (double) incommon / (incommon + simDiff);
else else
return incommon; return incommon;
} }
//converts every json into a comparable string basing on parameters // converts every json into a comparable string basing on parameters
private String toComparableString(String json){ private String toComparableString(String json) {
StringBuilder st = new StringBuilder(); //to build the string used for comparisons basing on the jpath into parameters StringBuilder st = new StringBuilder(); // to build the string used for comparisons basing on the jpath into
// parameters
//for each path in the param list // for each path in the param list
for (String key: params.keySet().stream().filter(k -> k.contains("jpath")).collect(Collectors.toList())) { for (String key : params.keySet().stream().filter(k -> k.contains("jpath")).collect(Collectors.toList())) {
String path = params.get(key); String path = params.get(key);
String value = MapDocumentUtil.getJPathString(path, json); String value = MapDocumentUtil.getJPathString(path, json);
if (value == null || value.isEmpty()) if (value == null || value.isEmpty())
value = ""; value = "";
st.append( value + "::"); st.append(value + "::");
} }
st.setLength(st.length()-2); st.setLength(st.length() - 2);
return st.toString(); return st.toString();
} }
} }

View File

@ -1,47 +1,50 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import java.util.Set;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
import java.util.Set;
@ComparatorClass("keywordMatch") @ComparatorClass("keywordMatch")
public class KeywordMatch extends AbstractComparator { public class KeywordMatch extends AbstractComparator {
Map<String, String> params; Map<String, String> params;
public KeywordMatch(Map<String, String> params) { public KeywordMatch(Map<String, String> params) {
super(params); super(params);
this.params = params; this.params = params;
} }
@Override @Override
public double distance(final String a, final String b, final Config conf) { public double distance(final String a, final String b, final Config conf) {
String ca = cleanup(a); String ca = cleanup(a);
String cb = cleanup(b); String cb = cleanup(b);
ca = normalize(ca); ca = normalize(ca);
cb = normalize(cb); cb = normalize(cb);
ca = filterAllStopWords(ca); ca = filterAllStopWords(ca);
cb = filterAllStopWords(cb); cb = filterAllStopWords(cb);
Set<String> keywords1 = getKeywords(ca, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4"))); Set<String> keywords1 = getKeywords(
Set<String> keywords2 = getKeywords(cb, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4"))); ca, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> keywords2 = getKeywords(
cb, conf.translationMap(), Integer.parseInt(params.getOrDefault("windowSize", "4")));
Set<String> codes1 = toCodes(keywords1, conf.translationMap()); Set<String> codes1 = toCodes(keywords1, conf.translationMap());
Set<String> codes2 = toCodes(keywords2, conf.translationMap()); Set<String> codes2 = toCodes(keywords2, conf.translationMap());
//if no cities are detected, the comparator gives 1.0 // if no cities are detected, the comparator gives 1.0
if (codes1.isEmpty() && codes2.isEmpty()) if (codes1.isEmpty() && codes2.isEmpty())
return 1.0; return 1.0;
else { else {
if (codes1.isEmpty() ^ codes2.isEmpty()) if (codes1.isEmpty() ^ codes2.isEmpty())
return -1.0; //undefined if one of the two has no keywords return -1.0; // undefined if one of the two has no keywords
return commonElementsPercentage(codes1, codes2); return commonElementsPercentage(codes1, codes2);
} }
} }
} }

View File

@ -1,15 +1,17 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("level2JaroWinkler") @ComparatorClass("level2JaroWinkler")
public class Level2JaroWinkler extends AbstractComparator { public class Level2JaroWinkler extends AbstractComparator {
public Level2JaroWinkler(Map<String, String> params){ public Level2JaroWinkler(Map<String, String> params) {
super(params, new com.wcohen.ss.Level2JaroWinkler()); super(params, new com.wcohen.ss.Level2JaroWinkler());
} }

View File

@ -1,16 +1,18 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.config.Config;
import java.util.Map;
@ComparatorClass("level2JaroWinklerTitle") @ComparatorClass("level2JaroWinklerTitle")
public class Level2JaroWinklerTitle extends AbstractComparator { public class Level2JaroWinklerTitle extends AbstractComparator {
public Level2JaroWinklerTitle(Map<String,String> params){ public Level2JaroWinklerTitle(Map<String, String> params) {
super(params, new com.wcohen.ss.Level2JaroWinkler()); super(params, new com.wcohen.ss.Level2JaroWinkler());
} }
@ -29,7 +31,8 @@ public class Level2JaroWinklerTitle extends AbstractComparator {
final boolean check = checkNumbers(ca, cb); final boolean check = checkNumbers(ca, cb);
if (check) return 0.5; if (check)
return 0.5;
return ssalgo.score(ca, cb); return ssalgo.score(ca, cb);
} }

View File

@ -1,15 +1,17 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("level2Levenstein") @ComparatorClass("level2Levenstein")
public class Level2Levenstein extends AbstractComparator { public class Level2Levenstein extends AbstractComparator {
public Level2Levenstein(Map<String,String> params){ public Level2Levenstein(Map<String, String> params) {
super(params, new com.wcohen.ss.Level2Levenstein()); super(params, new com.wcohen.ss.Level2Levenstein());
} }

View File

@ -1,15 +1,17 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("levenstein") @ComparatorClass("levenstein")
public class Levenstein extends AbstractComparator { public class Levenstein extends AbstractComparator {
public Levenstein(Map<String,String> params){ public Levenstein(Map<String, String> params) {
super(params, new com.wcohen.ss.Levenstein()); super(params, new com.wcohen.ss.Levenstein());
} }

View File

@ -1,21 +1,23 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import com.wcohen.ss.AbstractStringDistance; import java.util.Map;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.config.Config;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import java.util.Map; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("levensteinTitle") @ComparatorClass("levensteinTitle")
public class LevensteinTitle extends AbstractComparator { public class LevensteinTitle extends AbstractComparator {
private static final Log log = LogFactory.getLog(LevensteinTitle.class); private static final Log log = LogFactory.getLog(LevensteinTitle.class);
public LevensteinTitle(Map<String,String> params){ public LevensteinTitle(Map<String, String> params) {
super(params, new com.wcohen.ss.Levenstein()); super(params, new com.wcohen.ss.Levenstein());
} }
@ -34,7 +36,8 @@ public class LevensteinTitle extends AbstractComparator {
final boolean check = checkNumbers(ca, cb); final boolean check = checkNumbers(ca, cb);
if (check) return 0.5; if (check)
return 0.5;
return normalize(ssalgo.score(ca, cb), ca.length(), cb.length()); return normalize(ssalgo.score(ca, cb), ca.length(), cb.length());
} }

View File

@ -1,12 +1,13 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.config.Config;
import java.util.Map;
/** /**
* Compared compare between two titles, ignoring version numbers. Suitable for Software entities. * Compared compare between two titles, ignoring version numbers. Suitable for Software entities.
@ -14,7 +15,7 @@ import java.util.Map;
@ComparatorClass("levensteinTitleIgnoreVersion") @ComparatorClass("levensteinTitleIgnoreVersion")
public class LevensteinTitleIgnoreVersion extends AbstractComparator { public class LevensteinTitleIgnoreVersion extends AbstractComparator {
public LevensteinTitleIgnoreVersion(Map<String,String> params){ public LevensteinTitleIgnoreVersion(Map<String, String> params) {
super(params, new com.wcohen.ss.Levenstein()); super(params, new com.wcohen.ss.Levenstein());
} }

View File

@ -1,17 +1,19 @@
package eu.dnetlib.pace.tree;
import com.google.common.collect.Sets; package eu.dnetlib.pace.tree;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
/** /**
* The Class Contains match * The Class Contains match
* *
@ -20,55 +22,54 @@ import java.util.stream.Collectors;
@ComparatorClass("listContainsMatch") @ComparatorClass("listContainsMatch")
public class ListContainsMatch extends AbstractComparator { public class ListContainsMatch extends AbstractComparator {
private Map<String, String> params; private Map<String, String> params;
private boolean CASE_SENSITIVE; private boolean CASE_SENSITIVE;
private String STRING; private String STRING;
private String AGGREGATOR; private String AGGREGATOR;
public ListContainsMatch(Map<String, String> params) { public ListContainsMatch(Map<String, String> params) {
super(params); super(params);
this.params = params; this.params = params;
//read parameters // read parameters
CASE_SENSITIVE = Boolean.parseBoolean(params.getOrDefault("caseSensitive", "false")); CASE_SENSITIVE = Boolean.parseBoolean(params.getOrDefault("caseSensitive", "false"));
STRING = params.get("string"); STRING = params.get("string");
AGGREGATOR = params.get("bool"); AGGREGATOR = params.get("bool");
} }
@Override @Override
public double compare(final Field a, final Field b, final Config conf) { public double compare(final Field a, final Field b, final Config conf) {
List<String> sa = ((FieldList) a).stringList(); List<String> sa = ((FieldList) a).stringList();
List<String> sb = ((FieldList) b).stringList(); List<String> sb = ((FieldList) b).stringList();
if (sa.isEmpty() || sb.isEmpty()) { if (sa.isEmpty() || sb.isEmpty()) {
return -1; return -1;
} }
if (!CASE_SENSITIVE) { if (!CASE_SENSITIVE) {
sa = sa.stream().map(String::toLowerCase).collect(Collectors.toList()); sa = sa.stream().map(String::toLowerCase).collect(Collectors.toList());
sb = sb.stream().map(String::toLowerCase).collect(Collectors.toList()); sb = sb.stream().map(String::toLowerCase).collect(Collectors.toList());
STRING = STRING.toLowerCase(); STRING = STRING.toLowerCase();
} }
switch(AGGREGATOR) { switch (AGGREGATOR) {
case "AND": case "AND":
if(sa.contains(STRING) && sb.contains(STRING)) if (sa.contains(STRING) && sb.contains(STRING))
return 1.0; return 1.0;
break; break;
case "OR": case "OR":
if(sa.contains(STRING) || sb.contains(STRING)) if (sa.contains(STRING) || sb.contains(STRING))
return 1.0; return 1.0;
break; break;
case "XOR": case "XOR":
if(sa.contains(STRING) ^ sb.contains(STRING)) if (sa.contains(STRING) ^ sb.contains(STRING))
return 1.0; return 1.0;
break; break;
default: default:
return 0.0; return 0.0;
} }
return 0.0; return 0.0;
} }
} }

View File

@ -1,16 +1,18 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import eu.dnetlib.pace.config.Config;
import java.util.Map;
@ComparatorClass("mustBeDifferent") @ComparatorClass("mustBeDifferent")
public class MustBeDifferent extends AbstractComparator { public class MustBeDifferent extends AbstractComparator {
public MustBeDifferent(Map<String,String> params){ public MustBeDifferent(Map<String, String> params) {
super(params, new com.wcohen.ss.Levenstein()); super(params, new com.wcohen.ss.Levenstein());
} }

View File

@ -1,12 +1,13 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field; import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.Comparator; import eu.dnetlib.pace.tree.support.Comparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
/** /**
* Not all fields of a document need to partecipate in the compare measure. We model those fields as having a * Not all fields of a document need to partecipate in the compare measure. We model those fields as having a
* NullDistanceAlgo. * NullDistanceAlgo.
@ -14,7 +15,7 @@ import java.util.Map;
@ComparatorClass("null") @ComparatorClass("null")
public class NullDistanceAlgo implements Comparator { public class NullDistanceAlgo implements Comparator {
public NullDistanceAlgo(Map<String, String> params){ public NullDistanceAlgo(Map<String, String> params) {
} }
@Override @Override

View File

@ -1,34 +1,35 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("numbersComparator") @ComparatorClass("numbersComparator")
public class NumbersComparator extends AbstractComparator { public class NumbersComparator extends AbstractComparator {
Map<String, String> params; Map<String, String> params;
public NumbersComparator(Map<String, String> params) { public NumbersComparator(Map<String, String> params) {
super(params); super(params);
this.params = params; this.params = params;
} }
@Override @Override
public double distance(String a, String b, Config conf) { public double distance(String a, String b, Config conf) {
//extracts numbers from the field // extracts numbers from the field
String numbers1 = getNumbers(nfd(a)); String numbers1 = getNumbers(nfd(a));
String numbers2 = getNumbers(nfd(b)); String numbers2 = getNumbers(nfd(b));
if (numbers1.isEmpty() || numbers2.isEmpty()) if (numbers1.isEmpty() || numbers2.isEmpty())
return -1.0; return -1.0;
int n1 = Integer.parseInt(numbers1); int n1 = Integer.parseInt(numbers1);
int n2 = Integer.parseInt(numbers2); int n2 = Integer.parseInt(numbers2);
return Math.abs(n1 - n2); return Math.abs(n1 - n2);
} }
} }

View File

@ -1,35 +1,35 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("numbersMatch") @ComparatorClass("numbersMatch")
public class NumbersMatch extends AbstractComparator { public class NumbersMatch extends AbstractComparator {
public NumbersMatch(Map<String, String> params) {
super(params);
}
public NumbersMatch(Map<String, String> params) { @Override
super(params); public double distance(String a, String b, Config conf) {
}
@Override // extracts numbers from the field
public double distance(String a, String b, Config conf) { String numbers1 = getNumbers(nfd(a));
String numbers2 = getNumbers(nfd(b));
//extracts numbers from the field if (numbers1.isEmpty() && numbers2.isEmpty())
String numbers1 = getNumbers(nfd(a)); return 1.0;
String numbers2 = getNumbers(nfd(b));
if (numbers1.isEmpty() && numbers2.isEmpty()) if (numbers1.isEmpty() || numbers2.isEmpty())
return 1.0; return -1.0;
if (numbers1.isEmpty() || numbers2.isEmpty()) if (numbers1.equals(numbers2))
return -1.0; return 1.0;
if (numbers1.equals(numbers2)) return 0.0;
return 1.0; }
}
return 0.0;
}
}

View File

@ -1,35 +1,35 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
@ComparatorClass("romansMatch") @ComparatorClass("romansMatch")
public class RomansMatch extends AbstractComparator { public class RomansMatch extends AbstractComparator {
public RomansMatch(Map<String, String> params) {
super(params);
}
public RomansMatch(Map<String, String> params) { @Override
super(params); public double distance(String a, String b, Config conf) {
}
@Override // extracts romans from the field
public double distance(String a, String b, Config conf) { String romans1 = getRomans(nfd(a));
String romans2 = getRomans(nfd(b));
//extracts romans from the field if (romans1.isEmpty() && romans2.isEmpty())
String romans1 = getRomans(nfd(a)); return 1.0;
String romans2 = getRomans(nfd(b));
if (romans1.isEmpty() && romans2.isEmpty()) if (romans1.isEmpty() || romans2.isEmpty())
return 1.0; return -1.0;
if (romans1.isEmpty() || romans2.isEmpty()) if (romans1.equals(romans2))
return -1.0; return 1.0;
if (romans1.equals(romans2)) return 0.0;
return 1.0; }
return 0.0;
}
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.List; import java.util.List;
@ -18,34 +19,34 @@ import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("sizeMatch") @ComparatorClass("sizeMatch")
public class SizeMatch extends AbstractComparator { public class SizeMatch extends AbstractComparator {
/** /**
* Instantiates a new size match. * Instantiates a new size match.
* *
* @param params * @param params
* the parameters * the parameters
*/ */
public SizeMatch(final Map<String, String> params) { public SizeMatch(final Map<String, String> params) {
super(params); super(params);
} }
@Override @Override
public double compare(final Field a, final Field b, final Config conf) { public double compare(final Field a, final Field b, final Config conf) {
if (a.isEmpty() || b.isEmpty()) if (a.isEmpty() || b.isEmpty())
return -1; return -1;
return Iterables.size(a) == Iterables.size(b) ? 1 : 0; return Iterables.size(a) == Iterables.size(b) ? 1 : 0;
} }
/** /**
* Checks if is empty. * Checks if is empty.
* *
* @param a * @param a
* the a * the a
* @return true, if is empty * @return true, if is empty
*/ */
protected boolean isEmpty(final Iterable<?> a) { protected boolean isEmpty(final Iterable<?> a) {
return (a == null) || Iterables.isEmpty(a); return (a == null) || Iterables.isEmpty(a);
} }
} }

View File

@ -1,18 +1,20 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractSortedComparator; import eu.dnetlib.pace.tree.support.AbstractSortedComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
/** /**
* The Class SortedJaroWinkler. * The Class SortedJaroWinkler.
*/ */
@ComparatorClass("sortedJaroWinkler") @ComparatorClass("sortedJaroWinkler")
public class SortedJaroWinkler extends AbstractSortedComparator { public class SortedJaroWinkler extends AbstractSortedComparator {
public SortedJaroWinkler(Map<String,String> params){ public SortedJaroWinkler(Map<String, String> params) {
super(params, new com.wcohen.ss.Levenstein()); super(params, new com.wcohen.ss.Levenstein());
} }
@ -40,7 +42,6 @@ public class SortedJaroWinkler extends AbstractSortedComparator {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.compare.DistanceAlgo#getWeight() * @see eu.dnetlib.pace.compare.DistanceAlgo#getWeight()
*/ */
@Override @Override
@ -50,7 +51,6 @@ public class SortedJaroWinkler extends AbstractSortedComparator {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.compare.SecondStringDistanceAlgo#normalize(double) * @see eu.dnetlib.pace.compare.SecondStringDistanceAlgo#normalize(double)
*/ */
@Override @Override

View File

@ -1,11 +1,13 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.tree.support.AbstractSortedComparator; import eu.dnetlib.pace.tree.support.AbstractSortedComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
/** /**
* The Class SortedJaroWinkler. * The Class SortedJaroWinkler.
*/ */
@ -22,7 +24,7 @@ public class SortedLevel2JaroWinkler extends AbstractSortedComparator {
super(weight, new com.wcohen.ss.Level2JaroWinkler()); super(weight, new com.wcohen.ss.Level2JaroWinkler());
} }
public SortedLevel2JaroWinkler(final Map<String, String> params){ public SortedLevel2JaroWinkler(final Map<String, String> params) {
super(params, new com.wcohen.ss.Level2JaroWinkler()); super(params, new com.wcohen.ss.Level2JaroWinkler());
} }
@ -40,7 +42,6 @@ public class SortedLevel2JaroWinkler extends AbstractSortedComparator {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.compare.DistanceAlgo#getWeight() * @see eu.dnetlib.pace.compare.DistanceAlgo#getWeight()
*/ */
@Override @Override
@ -50,7 +51,6 @@ public class SortedLevel2JaroWinkler extends AbstractSortedComparator {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.compare.SecondStringDistanceAlgo#normalize(double) * @see eu.dnetlib.pace.compare.SecondStringDistanceAlgo#normalize(double)
*/ */
@Override @Override

View File

@ -1,11 +1,12 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
/** /**
* The Class Contains match * The Class Contains match
* *
@ -14,53 +15,53 @@ import java.util.Map;
@ComparatorClass("stringContainsMatch") @ComparatorClass("stringContainsMatch")
public class StringContainsMatch extends AbstractComparator { public class StringContainsMatch extends AbstractComparator {
private Map<String, String> params; private Map<String, String> params;
private boolean CASE_SENSITIVE; private boolean CASE_SENSITIVE;
private String STRING; private String STRING;
private String AGGREGATOR; private String AGGREGATOR;
public StringContainsMatch(Map<String, String> params) { public StringContainsMatch(Map<String, String> params) {
super(params); super(params);
this.params = params; this.params = params;
//read parameters // read parameters
CASE_SENSITIVE = Boolean.parseBoolean(params.getOrDefault("caseSensitive", "false")); CASE_SENSITIVE = Boolean.parseBoolean(params.getOrDefault("caseSensitive", "false"));
STRING = params.get("string"); STRING = params.get("string");
AGGREGATOR = params.get("aggregator"); AGGREGATOR = params.get("aggregator");
} }
@Override @Override
public double distance(final String a, final String b, final Config conf) { public double distance(final String a, final String b, final Config conf) {
String ca = a; String ca = a;
String cb = b; String cb = b;
if (!CASE_SENSITIVE) { if (!CASE_SENSITIVE) {
ca = a.toLowerCase(); ca = a.toLowerCase();
cb = b.toLowerCase(); cb = b.toLowerCase();
STRING = STRING.toLowerCase(); STRING = STRING.toLowerCase();
} }
if (AGGREGATOR != null) { if (AGGREGATOR != null) {
switch (AGGREGATOR) { switch (AGGREGATOR) {
case "AND": case "AND":
if (ca.contains(STRING) && cb.contains(STRING)) if (ca.contains(STRING) && cb.contains(STRING))
return 1.0; return 1.0;
break; break;
case "OR": case "OR":
if (ca.contains(STRING) || cb.contains(STRING)) if (ca.contains(STRING) || cb.contains(STRING))
return 1.0; return 1.0;
break; break;
case "XOR": case "XOR":
if (ca.contains(STRING) ^ cb.contains(STRING)) if (ca.contains(STRING) ^ cb.contains(STRING))
return 1.0; return 1.0;
break; break;
default: default:
return 0.0; return 0.0;
} }
} }
return 0.0; return 0.0;
} }
} }

View File

@ -1,54 +1,57 @@
package eu.dnetlib.pace.tree;
import com.google.common.collect.Sets; package eu.dnetlib.pace.tree;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.HashSet; import java.util.HashSet;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("stringListMatch") @ComparatorClass("stringListMatch")
public class StringListMatch extends AbstractComparator { public class StringListMatch extends AbstractComparator {
private static final Log log = LogFactory.getLog(StringListMatch.class); private static final Log log = LogFactory.getLog(StringListMatch.class);
private Map<String, String> params; private Map<String, String> params;
final private String TYPE; //percentage or count final private String TYPE; // percentage or count
public StringListMatch(final Map<String, String> params) { public StringListMatch(final Map<String, String> params) {
super(params); super(params);
this.params = params; this.params = params;
TYPE = params.getOrDefault("type", "percentage"); TYPE = params.getOrDefault("type", "percentage");
} }
@Override @Override
public double compare(final Field a, final Field b, final Config conf) { public double compare(final Field a, final Field b, final Config conf) {
final Set<String> pa = new HashSet<>(((FieldList) a).stringList()); final Set<String> pa = new HashSet<>(((FieldList) a).stringList());
final Set<String> pb = new HashSet<>(((FieldList) b).stringList()); final Set<String> pb = new HashSet<>(((FieldList) b).stringList());
if (pa.isEmpty() || pb.isEmpty()) { if (pa.isEmpty() || pb.isEmpty()) {
return -1; //return undefined if one of the two lists is empty return -1; // return undefined if one of the two lists is empty
} }
int incommon = Sets.intersection(pa, pb).size(); int incommon = Sets.intersection(pa, pb).size();
int simDiff = Sets.symmetricDifference(pa, pb).size(); int simDiff = Sets.symmetricDifference(pa, pb).size();
if (incommon + simDiff == 0) { if (incommon + simDiff == 0) {
return 0.0; return 0.0;
} }
if(TYPE.equals("percentage")) if (TYPE.equals("percentage"))
return (double)incommon / (incommon + simDiff); return (double) incommon / (incommon + simDiff);
else else
return incommon; return incommon;
} }
} }

View File

@ -1,17 +1,18 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import eu.dnetlib.pace.config.Config; import java.util.Map;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.config.Type; import eu.dnetlib.pace.config.Type;
import eu.dnetlib.pace.model.Field; import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import java.util.Map;
/** /**
* The Class SubStringLevenstein. * The Class SubStringLevenstein.
*/ */
@ -31,7 +32,7 @@ public class SubStringLevenstein extends AbstractComparator {
super(w, new com.wcohen.ss.Levenstein()); super(w, new com.wcohen.ss.Levenstein());
} }
public SubStringLevenstein(Map<String, String> params){ public SubStringLevenstein(Map<String, String> params) {
super(params, new com.wcohen.ss.Levenstein()); super(params, new com.wcohen.ss.Levenstein());
this.limit = Integer.parseInt(params.getOrDefault("limit", "1")); this.limit = Integer.parseInt(params.getOrDefault("limit", "1"));
} }
@ -66,8 +67,8 @@ public class SubStringLevenstein extends AbstractComparator {
/* /*
* (non-Javadoc) * (non-Javadoc)
* * @see eu.dnetlib.pace.compare.SecondStringDistanceAlgo#compare(eu.dnetlib.pace.model.Field,
* @see eu.dnetlib.pace.compare.SecondStringDistanceAlgo#compare(eu.dnetlib.pace.model.Field, eu.dnetlib.pace.model.Field) * eu.dnetlib.pace.model.Field)
*/ */
@Override @Override
public double distance(final Field a, final Field b, final Config conf) { public double distance(final Field a, final Field b, final Config conf) {
@ -79,7 +80,6 @@ public class SubStringLevenstein extends AbstractComparator {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.compare.DistanceAlgo#getWeight() * @see eu.dnetlib.pace.compare.DistanceAlgo#getWeight()
*/ */
@Override @Override
@ -89,7 +89,6 @@ public class SubStringLevenstein extends AbstractComparator {
/* /*
* (non-Javadoc) * (non-Javadoc)
*
* @see eu.dnetlib.pace.compare.SecondStringDistanceAlgo#normalize(double) * @see eu.dnetlib.pace.compare.SecondStringDistanceAlgo#normalize(double)
*/ */
@Override @Override

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.List; import java.util.List;
@ -17,24 +18,24 @@ import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("titleVersionMatch") @ComparatorClass("titleVersionMatch")
public class TitleVersionMatch extends AbstractComparator { public class TitleVersionMatch extends AbstractComparator {
public TitleVersionMatch(final Map<String, String> params) { public TitleVersionMatch(final Map<String, String> params) {
super(params); super(params);
} }
@Override @Override
public double compare(final Field a, final Field b, final Config conf) { public double compare(final Field a, final Field b, final Config conf) {
final String valueA = getFirstValue(a); final String valueA = getFirstValue(a);
final String valueB = getFirstValue(b); final String valueB = getFirstValue(b);
if (valueA.isEmpty() || valueB.isEmpty()) if (valueA.isEmpty() || valueB.isEmpty())
return -1; return -1;
return notNull(valueA) && notNull(valueB) && !checkNumbers(valueA, valueB) ? 1 : 0; return notNull(valueA) && notNull(valueB) && !checkNumbers(valueA, valueB) ? 1 : 0;
} }
@Override @Override
public String toString() { public String toString() {
return getClass().getSimpleName() + ":" + super.toString(); return getClass().getSimpleName() + ":" + super.toString();
} }
} }

View File

@ -1,59 +1,61 @@
package eu.dnetlib.pace.tree;
import eu.dnetlib.pace.config.Config; package eu.dnetlib.pace.tree;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import org.apache.commons.lang3.StringUtils;
import java.net.MalformedURLException; import java.net.MalformedURLException;
import java.net.URL; import java.net.URL;
import java.util.Map; import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.ComparatorClass;
@ComparatorClass("urlMatcher") @ComparatorClass("urlMatcher")
public class UrlMatcher extends Levenstein { public class UrlMatcher extends Levenstein {
private Map<String, String> params; private Map<String, String> params;
public UrlMatcher(Map<String, String> params){ public UrlMatcher(Map<String, String> params) {
super(params); super(params);
this.params = params; this.params = params;
} }
public UrlMatcher(double weight, Map<String, String> params) { public UrlMatcher(double weight, Map<String, String> params) {
super(weight); super(weight);
this.params = params; this.params = params;
} }
public void setParams(Map<String, String> params) { public void setParams(Map<String, String> params) {
this.params = params; this.params = params;
} }
@Override @Override
public double distance(Field a, Field b, final Config conf) { public double distance(Field a, Field b, final Config conf) {
final URL urlA = asUrl(getFirstValue(a)); final URL urlA = asUrl(getFirstValue(a));
final URL urlB = asUrl(getFirstValue(b)); final URL urlB = asUrl(getFirstValue(b));
if (!urlA.getHost().equalsIgnoreCase(urlB.getHost())) { if (!urlA.getHost().equalsIgnoreCase(urlB.getHost())) {
return 0.0; return 0.0;
} }
Double hostW = Double.parseDouble(params.getOrDefault("host", "0.5")); Double hostW = Double.parseDouble(params.getOrDefault("host", "0.5"));
Double pathW = Double.parseDouble(params.getOrDefault("path", "0.5")); Double pathW = Double.parseDouble(params.getOrDefault("path", "0.5"));
if (StringUtils.isBlank(urlA.getPath()) || StringUtils.isBlank(urlB.getPath())) { if (StringUtils.isBlank(urlA.getPath()) || StringUtils.isBlank(urlB.getPath())) {
return hostW * 0.5; return hostW * 0.5;
} }
return hostW + pathW * super.distance(urlA.getPath(), urlB.getPath(), conf); return hostW + pathW * super.distance(urlA.getPath(), urlB.getPath(), conf);
} }
private URL asUrl(final String value) { private URL asUrl(final String value) {
try { try {
return new URL(value); return new URL(value);
} catch (MalformedURLException e) { } catch (MalformedURLException e) {
// should not happen as checked by pace typing // should not happen as checked by pace typing
throw new IllegalStateException("invalid URL: " + value); throw new IllegalStateException("invalid URL: " + value);
} }
} }
} }

View File

@ -1,12 +1,14 @@
package eu.dnetlib.pace.tree; package eu.dnetlib.pace.tree;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.Field; import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.tree.support.AbstractComparator; import eu.dnetlib.pace.tree.support.AbstractComparator;
import eu.dnetlib.pace.tree.support.ComparatorClass; import eu.dnetlib.pace.tree.support.ComparatorClass;
import org.apache.commons.lang3.StringUtils;
import java.util.Map;
/** /**
* Returns true if the year of the date field in the given documents are the same, false when any of the two is invalid or it's missing. * Returns true if the year of the date field in the given documents are the same, false when any of the two is invalid or it's missing.
@ -16,36 +18,36 @@ import java.util.Map;
@ComparatorClass("yearMatch") @ComparatorClass("yearMatch")
public class YearMatch extends AbstractComparator { public class YearMatch extends AbstractComparator {
private int limit = 4; private int limit = 4;
public YearMatch(final Map<String, String> params) { public YearMatch(final Map<String, String> params) {
super(params); super(params);
} }
@Override @Override
public double compare(final Field a, final Field b, final Config conf) { public double compare(final Field a, final Field b, final Config conf) {
final String valueA = getNumbers(getFirstValue(a)); final String valueA = getNumbers(getFirstValue(a));
final String valueB = getNumbers(getFirstValue(b)); final String valueB = getNumbers(getFirstValue(b));
if (valueA.isEmpty() || valueB.isEmpty()) if (valueA.isEmpty() || valueB.isEmpty())
return -1; return -1;
final boolean lengthMatch = checkLength(valueA) && checkLength(valueB); final boolean lengthMatch = checkLength(valueA) && checkLength(valueB);
final boolean onemissing = valueA.isEmpty() || valueB.isEmpty(); final boolean onemissing = valueA.isEmpty() || valueB.isEmpty();
return lengthMatch && valueA.equals(valueB) || onemissing ? 1 : 0; return lengthMatch && valueA.equals(valueB) || onemissing ? 1 : 0;
} }
protected boolean checkLength(final String s) { protected boolean checkLength(final String s) {
return s.length() == limit; return s.length() == limit;
} }
protected String getFirstValue(final Field value) { protected String getFirstValue(final Field value) {
return (value != null) && !value.isEmpty() ? StringUtils.left(value.stringValue(), limit) : ""; return (value != null) && !value.isEmpty() ? StringUtils.left(value.stringValue(), limit) : "";
} }
@Override @Override
public String toString() { public String toString() {
return getClass().getSimpleName() + ":" + super.toString(); return getClass().getSimpleName() + ":" + super.toString();
} }
} }

View File

@ -1,124 +1,130 @@
package eu.dnetlib.pace.tree.support; package eu.dnetlib.pace.tree.support;
import java.util.List;
import java.util.Map;
import com.wcohen.ss.AbstractStringDistance; import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.common.AbstractPaceFunctions; import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.config.Type; import eu.dnetlib.pace.config.Type;
import eu.dnetlib.pace.model.Field; import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList; import eu.dnetlib.pace.model.FieldList;
import java.util.List;
import java.util.Map;
public abstract class AbstractComparator extends AbstractPaceFunctions implements Comparator { public abstract class AbstractComparator extends AbstractPaceFunctions implements Comparator {
/** The ssalgo. */ /** The ssalgo. */
protected AbstractStringDistance ssalgo; protected AbstractStringDistance ssalgo;
/** The weight. */ /** The weight. */
protected double weight = 0.0; protected double weight = 0.0;
private Map<String, String> params; private Map<String, String> params;
protected AbstractComparator(Map<String, String> params) { protected AbstractComparator(Map<String, String> params) {
this.params = params; this.params = params;
} }
protected AbstractComparator(Map<String, String> params, final AbstractStringDistance ssalgo){ protected AbstractComparator(Map<String, String> params, final AbstractStringDistance ssalgo) {
this.params = params; this.params = params;
this.weight = 1.0; this.weight = 1.0;
this.ssalgo = ssalgo; this.ssalgo = ssalgo;
} }
/** /**
* Instantiates a new second string compare algo. * Instantiates a new second string compare algo.
* *
* @param weight * @param weight
* the weight * the weight
* @param ssalgo * @param ssalgo
* the ssalgo * the ssalgo
*/ */
protected AbstractComparator(final double weight, final AbstractStringDistance ssalgo) { protected AbstractComparator(final double weight, final AbstractStringDistance ssalgo) {
this.ssalgo = ssalgo; this.ssalgo = ssalgo;
this.weight = weight; this.weight = weight;
} }
protected AbstractComparator(final AbstractStringDistance ssalgo){ protected AbstractComparator(final AbstractStringDistance ssalgo) {
this.ssalgo = ssalgo; this.ssalgo = ssalgo;
} }
/** /**
* Normalize. * Normalize.
* *
* @param d * @param d
* the d * the d
* @return the double * @return the double
*/ */
protected double normalize(double d) { protected double normalize(double d) {
return d; return d;
} }
/** /**
* Distance. * Distance.
* *
* @param a * @param a
* the a * the a
* @param b * @param b
* the b * the b
* @return the double * @return the double
*/ */
public double distance(final String a, final String b, final Config conf) { public double distance(final String a, final String b, final Config conf) {
if (a.isEmpty() || b.isEmpty()) { if (a.isEmpty() || b.isEmpty()) {
return -1; //return -1 if a field is missing return -1; // return -1 if a field is missing
} }
double score = ssalgo.score(a, b); double score = ssalgo.score(a, b);
return normalize(score); return normalize(score);
} }
/** /**
* Distance. * Distance.
* *
* @param a * @param a
* the a * the a
* @param b * @param b
* the b * the b
* @return the double * @return the double
*/ */
protected double distance(final List<String> a, final List<String> b, final Config conf) { protected double distance(final List<String> a, final List<String> b, final Config conf) {
return distance(concat(a), concat(b), conf); return distance(concat(a), concat(b), conf);
} }
public double distance(final Field a, final Field b, final Config conf) { public double distance(final Field a, final Field b, final Config conf) {
if (a.getType().equals(Type.String) && b.getType().equals(Type.String)) return distance(a.stringValue(), b.stringValue(), conf); if (a.getType().equals(Type.String) && b.getType().equals(Type.String))
if (a.getType().equals(Type.List) && b.getType().equals(Type.List)) return distance(toList(a), toList(b), conf); return distance(a.stringValue(), b.stringValue(), conf);
if (a.getType().equals(Type.List) && b.getType().equals(Type.List))
return distance(toList(a), toList(b), conf);
throw new IllegalArgumentException("invalid types\n- A: " + a.toString() + "\n- B: " + b.toString()); throw new IllegalArgumentException("invalid types\n- A: " + a.toString() + "\n- B: " + b.toString());
} }
@Override @Override
public double compare(final Field a, final Field b, final Config conf) { public double compare(final Field a, final Field b, final Config conf) {
if (a.isEmpty() || b.isEmpty()) if (a.isEmpty() || b.isEmpty())
return -1; return -1;
if (a.getType().equals(Type.String) && b.getType().equals(Type.String)) return distance(a.stringValue(), b.stringValue(), conf); if (a.getType().equals(Type.String) && b.getType().equals(Type.String))
if (a.getType().equals(Type.List) && b.getType().equals(Type.List)) return distance(toList(a), toList(b), conf); return distance(a.stringValue(), b.stringValue(), conf);
if (a.getType().equals(Type.List) && b.getType().equals(Type.List))
return distance(toList(a), toList(b), conf);
throw new IllegalArgumentException("invalid types\n- A: " + a.toString() + "\n- B: " + b.toString()); throw new IllegalArgumentException("invalid types\n- A: " + a.toString() + "\n- B: " + b.toString());
} }
/** /**
* To list. * To list.
* *
* @param list * @param list
* the list * the list
* @return the list * @return the list
*/ */
protected List<String> toList(final Field list) { protected List<String> toList(final Field list) {
return ((FieldList) list).stringList(); return ((FieldList) list).stringList();
} }
public double getWeight(){ public double getWeight() {
return this.weight; return this.weight;
} }
} }

View File

@ -1,38 +1,40 @@
package eu.dnetlib.pace.tree.support;
import com.google.common.collect.Lists; package eu.dnetlib.pace.tree.support;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import com.google.common.collect.Lists;
import com.wcohen.ss.AbstractStringDistance;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldList;
public abstract class AbstractSortedComparator extends AbstractComparator { public abstract class AbstractSortedComparator extends AbstractComparator {
/** /**
* Instantiates a new sorted second string compare algo. * Instantiates a new sorted second string compare algo.
* *
* @param weight * @param weight
* the weight * the weight
* @param ssalgo * @param ssalgo
* the ssalgo * the ssalgo
*/ */
protected AbstractSortedComparator(final double weight, final AbstractStringDistance ssalgo) { protected AbstractSortedComparator(final double weight, final AbstractStringDistance ssalgo) {
super(weight, ssalgo); super(weight, ssalgo);
} }
protected AbstractSortedComparator(final Map<String, String> params, final AbstractStringDistance ssalgo){ protected AbstractSortedComparator(final Map<String, String> params, final AbstractStringDistance ssalgo) {
super(Double.parseDouble(params.get("weight")), ssalgo); super(Double.parseDouble(params.get("weight")), ssalgo);
} }
@Override @Override
protected List<String> toList(final Field list) { protected List<String> toList(final Field list) {
FieldList fl = (FieldList) list; FieldList fl = (FieldList) list;
List<String> values = Lists.newArrayList(fl.stringList()); List<String> values = Lists.newArrayList(fl.stringList());
Collections.sort(values); Collections.sort(values);
return values; return values;
} }
} }

View File

@ -1,24 +1,21 @@
package eu.dnetlib.pace.tree.support; package eu.dnetlib.pace.tree.support;
import eu.dnetlib.pace.util.PaceException; import eu.dnetlib.pace.util.PaceException;
public enum AggType { public enum AggType {
W_MEAN, //weighted mean W_MEAN, // weighted mean
AVG, //average AVG, // average
SUM, SUM, MAX, MIN, AND, // used for necessary conditions
MAX, OR; // used for sufficient conditions
MIN,
AND, //used for necessary conditions
OR; //used for sufficient conditions
public static AggType getEnum(String value) { public static AggType getEnum(String value) {
try { try {
return AggType.valueOf(value); return AggType.valueOf(value);
} } catch (IllegalArgumentException e) {
catch (IllegalArgumentException e) { throw new PaceException("Undefined aggregation type", e);
throw new PaceException("Undefined aggregation type", e); }
} }
}
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.tree.support; package eu.dnetlib.pace.tree.support;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
@ -5,10 +6,9 @@ import eu.dnetlib.pace.model.Field;
public interface Comparator { public interface Comparator {
/* /*
* return : -1 -> can't decide (i.e. missing field) * return : -1 -> can't decide (i.e. missing field) >0 -> similarity degree (depends on the algorithm)
* >0 -> similarity degree (depends on the algorithm) */
* */ public double compare(Field a, Field b, Config conf);
public double compare(Field a, Field b, Config conf);
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.tree.support; package eu.dnetlib.pace.tree.support;
import java.lang.annotation.ElementType; import java.lang.annotation.ElementType;
@ -9,5 +10,5 @@ import java.lang.annotation.Target;
@Target(ElementType.TYPE) @Target(ElementType.TYPE)
public @interface ComparatorClass { public @interface ComparatorClass {
public String value(); public String value();
} }

View File

@ -1,82 +1,84 @@
package eu.dnetlib.pace.tree.support; package eu.dnetlib.pace.tree.support;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.util.PaceException;
import java.io.IOException; import java.io.IOException;
import java.io.Serializable; import java.io.Serializable;
import java.util.Map; import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.util.PaceException;
/** /**
* The class that defines the configuration of each field in the decision tree. * The class that defines the configuration of each field in the decision tree.
* */ * */
public class FieldConf implements Serializable { public class FieldConf implements Serializable {
private String field; //name of the field on which apply the comparator private String field; // name of the field on which apply the comparator
private String comparator; //comparator name private String comparator; // comparator name
private double weight = 1.0; //weight for the field (to be used in the aggregation) private double weight = 1.0; // weight for the field (to be used in the aggregation)
private Map<String,String> params; //parameters private Map<String, String> params; // parameters
private boolean countIfUndefined; private boolean countIfUndefined;
public boolean isCountIfUndefined() { public boolean isCountIfUndefined() {
return countIfUndefined; return countIfUndefined;
} }
public void setCountIfUndefined(boolean countIfUndefined) { public void setCountIfUndefined(boolean countIfUndefined) {
this.countIfUndefined = countIfUndefined; this.countIfUndefined = countIfUndefined;
} }
public FieldConf() { public FieldConf() {
} }
public FieldConf(String field, String comparator, double weight, Map<String, String> params, boolean countIfUndefined) { public FieldConf(String field, String comparator, double weight, Map<String, String> params,
this.field = field; boolean countIfUndefined) {
this.comparator = comparator; this.field = field;
this.weight = weight; this.comparator = comparator;
this.params = params; this.weight = weight;
this.countIfUndefined = countIfUndefined; this.params = params;
} this.countIfUndefined = countIfUndefined;
}
public String getField() { public String getField() {
return field; return field;
} }
public void setField(String field) { public void setField(String field) {
this.field = field; this.field = field;
} }
public String getComparator() { public String getComparator() {
return comparator; return comparator;
} }
public void setComparator(String comparator) { public void setComparator(String comparator) {
this.comparator = comparator; this.comparator = comparator;
} }
public double getWeight() { public double getWeight() {
return weight; return weight;
} }
public void setWeight(double weight) { public void setWeight(double weight) {
this.weight = weight; this.weight = weight;
} }
public Map<String, String> getParams() { public Map<String, String> getParams() {
return params; return params;
} }
public void setParams(Map<String, String> params) { public void setParams(Map<String, String> params) {
this.params = params; this.params = params;
} }
@Override @Override
public String toString() { public String toString() {
try { try {
return new ObjectMapper().writeValueAsString(this); return new ObjectMapper().writeValueAsString(this);
} catch (IOException e) { } catch (IOException e) {
throw new PaceException("Impossible to convert to JSON: ", e); throw new PaceException("Impossible to convert to JSON: ", e);
} }
} }
} }

View File

@ -1,89 +1,90 @@
package eu.dnetlib.pace.tree.support; package eu.dnetlib.pace.tree.support;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.util.PaceException;
import java.io.IOException; import java.io.IOException;
import java.io.Serializable; import java.io.Serializable;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.util.PaceException;
/** /**
* The class that contains the result of each comparison in the decision tree * The class that contains the result of each comparison in the decision tree
* */ * */
public class FieldStats implements Serializable { public class FieldStats implements Serializable {
private double weight; //weight for the field (to be used in the aggregation) private double weight; // weight for the field (to be used in the aggregation)
private double threshold; //threshold for the field (to be used in some kind of aggregations) private double threshold; // threshold for the field (to be used in some kind of aggregations)
private double result; //the result of the comparison private double result; // the result of the comparison
private Field a; private Field a;
private Field b; private Field b;
private boolean countIfUndefined; private boolean countIfUndefined;
public FieldStats(double weight, double threshold, double result, boolean countIfUndefined, Field a, Field b) { public FieldStats(double weight, double threshold, double result, boolean countIfUndefined, Field a, Field b) {
this.weight = weight; this.weight = weight;
this.threshold = threshold; this.threshold = threshold;
this.result = result; this.result = result;
this.countIfUndefined = countIfUndefined; this.countIfUndefined = countIfUndefined;
this.a = a; this.a = a;
this.b = b; this.b = b;
} }
public double getThreshold() { public double getThreshold() {
return threshold; return threshold;
} }
public void setThreshold(double threshold) { public void setThreshold(double threshold) {
this.threshold = threshold; this.threshold = threshold;
} }
public double getWeight() { public double getWeight() {
return weight; return weight;
} }
public void setWeight(double weight) { public void setWeight(double weight) {
this.weight = weight; this.weight = weight;
} }
public double getResult() { public double getResult() {
return result; return result;
} }
public void setResult(double result) { public void setResult(double result) {
this.result = result; this.result = result;
} }
public boolean isCountIfUndefined() { public boolean isCountIfUndefined() {
return countIfUndefined; return countIfUndefined;
} }
public void setCountIfUndefined(boolean countIfUndefined) { public void setCountIfUndefined(boolean countIfUndefined) {
this.countIfUndefined = countIfUndefined; this.countIfUndefined = countIfUndefined;
} }
public Field getA() { public Field getA() {
return a; return a;
} }
public void setA(Field a) { public void setA(Field a) {
this.a = a; this.a = a;
} }
public Field getB() { public Field getB() {
return b; return b;
} }
public void setB(Field b) { public void setB(Field b) {
this.b = b; this.b = b;
} }
@Override @Override
public String toString(){ public String toString() {
try { try {
return new ObjectMapper().writeValueAsString(this); return new ObjectMapper().writeValueAsString(this);
} catch (IOException e) { } catch (IOException e) {
throw new PaceException("Impossible to convert to JSON: ", e); throw new PaceException("Impossible to convert to JSON: ", e);
} }
} }
} }

View File

@ -1,18 +1,16 @@
package eu.dnetlib.pace.tree.support; package eu.dnetlib.pace.tree.support;
public enum MatchType { public enum MatchType {
MATCH, MATCH, NO_MATCH, UNDEFINED;
NO_MATCH,
UNDEFINED;
public static MatchType parse(String value) { public static MatchType parse(String value) {
try { try {
return MatchType.valueOf(value); return MatchType.valueOf(value);
} } catch (IllegalArgumentException e) {
catch (IllegalArgumentException e) { return MatchType.UNDEFINED; // return UNDEFINED if the enum is not parsable
return MatchType.UNDEFINED; //return UNDEFINED if the enum is not parsable }
} }
}
} }

View File

@ -1,150 +1,160 @@
package eu.dnetlib.pace.tree.support;
import com.fasterxml.jackson.core.JsonProcessingException; package eu.dnetlib.pace.tree.support;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.config.PaceConfig;
import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.util.PaceException;
import org.apache.commons.lang3.StringUtils;
import java.io.IOException; import java.io.IOException;
import java.io.Serializable; import java.io.Serializable;
import java.io.StringWriter; import java.io.StringWriter;
import java.util.List; import java.util.List;
import org.apache.commons.lang3.StringUtils;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.config.PaceConfig;
import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.util.PaceException;
public class TreeNodeDef implements Serializable { public class TreeNodeDef implements Serializable {
final static String CROSS_COMPARE = "crossCompare"; final static String CROSS_COMPARE = "crossCompare";
private List<FieldConf> fields; private List<FieldConf> fields;
private AggType aggregation; private AggType aggregation;
private double threshold; private double threshold;
private String positive; private String positive;
private String negative; private String negative;
private String undefined; private String undefined;
boolean ignoreUndefined; boolean ignoreUndefined;
public TreeNodeDef(List<FieldConf> fields, AggType aggregation, double threshold, String positive, String negative, String undefined, boolean ignoreUndefined) { public TreeNodeDef(List<FieldConf> fields, AggType aggregation, double threshold, String positive, String negative,
this.fields = fields; String undefined, boolean ignoreUndefined) {
this.aggregation = aggregation; this.fields = fields;
this.threshold = threshold; this.aggregation = aggregation;
this.positive = positive; this.threshold = threshold;
this.negative = negative; this.positive = positive;
this.undefined = undefined; this.negative = negative;
this.ignoreUndefined = ignoreUndefined; this.undefined = undefined;
} this.ignoreUndefined = ignoreUndefined;
}
public TreeNodeDef() {} public TreeNodeDef() {
}
//function for the evaluation of the node // function for the evaluation of the node
public TreeNodeStats evaluate(MapDocument doc1, MapDocument doc2, Config conf) { public TreeNodeStats evaluate(MapDocument doc1, MapDocument doc2, Config conf) {
TreeNodeStats stats = new TreeNodeStats(); TreeNodeStats stats = new TreeNodeStats();
//for each field in the node, it computes the // for each field in the node, it computes the
for (FieldConf fieldConf : fields) { for (FieldConf fieldConf : fields) {
double weight = fieldConf.getWeight(); double weight = fieldConf.getWeight();
double result; double result;
//if the param specifies a cross comparison (i.e. compare elements from different fields), compute the result for both sides and return the maximum // if the param specifies a cross comparison (i.e. compare elements from different fields), compute the
if(fieldConf.getParams().keySet().stream().anyMatch(k -> k.contains(CROSS_COMPARE))) { // result for both sides and return the maximum
String crossField = fieldConf.getParams().get(CROSS_COMPARE); if (fieldConf.getParams().keySet().stream().anyMatch(k -> k.contains(CROSS_COMPARE))) {
double result1 = comparator(fieldConf).compare(doc1.getFieldMap().get(fieldConf.getField()), doc2.getFieldMap().get(crossField), conf); String crossField = fieldConf.getParams().get(CROSS_COMPARE);
double result2 = comparator(fieldConf).compare(doc1.getFieldMap().get(crossField), doc2.getFieldMap().get(fieldConf.getField()), conf); double result1 = comparator(fieldConf)
result = Math.max(result1,result2); .compare(doc1.getFieldMap().get(fieldConf.getField()), doc2.getFieldMap().get(crossField), conf);
} double result2 = comparator(fieldConf)
else { .compare(doc1.getFieldMap().get(crossField), doc2.getFieldMap().get(fieldConf.getField()), conf);
result = comparator(fieldConf).compare(doc1.getFieldMap().get(fieldConf.getField()), doc2.getFieldMap().get(fieldConf.getField()), conf); result = Math.max(result1, result2);
} } else {
result = comparator(fieldConf)
.compare(
doc1.getFieldMap().get(fieldConf.getField()), doc2.getFieldMap().get(fieldConf.getField()),
conf);
}
stats.addFieldStats( stats
fieldConf.getComparator() + " on " + fieldConf.getField() + " " + fields.indexOf(fieldConf), .addFieldStats(
new FieldStats( fieldConf.getComparator() + " on " + fieldConf.getField() + " " + fields.indexOf(fieldConf),
weight, new FieldStats(
Double.parseDouble(fieldConf.getParams().getOrDefault("threshold", "1.0")), weight,
result, Double.parseDouble(fieldConf.getParams().getOrDefault("threshold", "1.0")),
fieldConf.isCountIfUndefined(), result,
doc1.getFieldMap().get(fieldConf.getField()), fieldConf.isCountIfUndefined(),
doc2.getFieldMap().get(fieldConf.getField()) doc1.getFieldMap().get(fieldConf.getField()),
)); doc2.getFieldMap().get(fieldConf.getField())));
} }
return stats; return stats;
} }
private Comparator comparator(final FieldConf field){ private Comparator comparator(final FieldConf field) {
return PaceConfig.resolver.getComparator(field.getComparator(), field.getParams()); return PaceConfig.resolver.getComparator(field.getComparator(), field.getParams());
} }
public List<FieldConf> getFields() { public List<FieldConf> getFields() {
return fields; return fields;
} }
public void setFields(List<FieldConf> fields) { public void setFields(List<FieldConf> fields) {
this.fields = fields; this.fields = fields;
} }
public AggType getAggregation() { public AggType getAggregation() {
return aggregation; return aggregation;
} }
public void setAggregation(AggType aggregation) { public void setAggregation(AggType aggregation) {
this.aggregation = aggregation; this.aggregation = aggregation;
} }
public double getThreshold() { public double getThreshold() {
return threshold; return threshold;
} }
public void setThreshold(double threshold) { public void setThreshold(double threshold) {
this.threshold = threshold; this.threshold = threshold;
} }
public String getPositive() { public String getPositive() {
return positive; return positive;
} }
public void setPositive(String positive) { public void setPositive(String positive) {
this.positive = positive; this.positive = positive;
} }
public String getNegative() { public String getNegative() {
return negative; return negative;
} }
public void setNegative(String negative) { public void setNegative(String negative) {
this.negative = negative; this.negative = negative;
} }
public String getUndefined() { public String getUndefined() {
return undefined; return undefined;
} }
public void setUndefined(String undefined) { public void setUndefined(String undefined) {
this.undefined = undefined; this.undefined = undefined;
} }
public boolean isIgnoreUndefined() { public boolean isIgnoreUndefined() {
return ignoreUndefined; return ignoreUndefined;
} }
public void setIgnoreUndefined(boolean ignoreUndefined) { public void setIgnoreUndefined(boolean ignoreUndefined) {
this.ignoreUndefined = ignoreUndefined; this.ignoreUndefined = ignoreUndefined;
} }
@Override @Override
public String toString() { public String toString() {
try { try {
return new ObjectMapper().writeValueAsString(this); return new ObjectMapper().writeValueAsString(this);
} catch (IOException e) { } catch (IOException e) {
throw new PaceException("Impossible to convert to JSON: ", e); throw new PaceException("Impossible to convert to JSON: ", e);
} }
} }
} }

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.tree.support; package eu.dnetlib.pace.tree.support;
import java.io.Serializable; import java.io.Serializable;
@ -6,129 +7,128 @@ import java.util.Map;
public class TreeNodeStats implements Serializable { public class TreeNodeStats implements Serializable {
private Map<String, FieldStats> results; //this is an accumulator for the results of the node private Map<String, FieldStats> results; // this is an accumulator for the results of the node
public TreeNodeStats(){ public TreeNodeStats() {
this.results = new HashMap<>(); this.results = new HashMap<>();
} }
public Map<String, FieldStats> getResults() { public Map<String, FieldStats> getResults() {
return results; return results;
} }
public void addFieldStats(String id, FieldStats fieldStats){ public void addFieldStats(String id, FieldStats fieldStats) {
this.results.put(id, fieldStats); this.results.put(id, fieldStats);
} }
public int fieldsCount(){ public int fieldsCount() {
return this.results.size(); return this.results.size();
} }
public int undefinedCount(){ public int undefinedCount() {
int undefinedCount = 0; int undefinedCount = 0;
for(FieldStats fs: this.results.values()){ for (FieldStats fs : this.results.values()) {
if(fs.getResult() == -1) if (fs.getResult() == -1)
undefinedCount ++; undefinedCount++;
} }
return undefinedCount; return undefinedCount;
} }
public double scoreSum(){ public double scoreSum() {
double scoreSum = 0.0; double scoreSum = 0.0;
for(FieldStats fs: this.results.values()){ for (FieldStats fs : this.results.values()) {
if(fs.getResult()>=0.0) { if (fs.getResult() >= 0.0) {
scoreSum += fs.getResult(); scoreSum += fs.getResult();
} }
} }
return scoreSum; return scoreSum;
} }
//return the sum of the weights without considering the fields with countIfMissing=false && result=-1 // return the sum of the weights without considering the fields with countIfMissing=false && result=-1
public double weightSum(){ public double weightSum() {
double weightSum = 0.0; double weightSum = 0.0;
for(FieldStats fs: this.results.values()){ for (FieldStats fs : this.results.values()) {
if(fs.getResult()>=0.0 || (fs.getResult()<0.0 && fs.isCountIfUndefined())) { if (fs.getResult() >= 0.0 || (fs.getResult() < 0.0 && fs.isCountIfUndefined())) {
weightSum += fs.getWeight(); weightSum += fs.getWeight();
} }
} }
return weightSum; return weightSum;
} }
public double weightedScoreSum(){ public double weightedScoreSum() {
double weightedScoreSum = 0.0; double weightedScoreSum = 0.0;
for(FieldStats fs: this.results.values()){ for (FieldStats fs : this.results.values()) {
if(fs.getResult()>=0.0) { if (fs.getResult() >= 0.0) {
weightedScoreSum += fs.getResult()*fs.getWeight(); weightedScoreSum += fs.getResult() * fs.getWeight();
} }
} }
return weightedScoreSum; return weightedScoreSum;
} }
public double max(){ public double max() {
double max = -1.0; double max = -1.0;
for(FieldStats fs: this.results.values()){ for (FieldStats fs : this.results.values()) {
if(fs.getResult()>max) if (fs.getResult() > max)
max = fs.getResult(); max = fs.getResult();
} }
return max; return max;
} }
public double min(){ public double min() {
double min = 100.0; //random high value double min = 100.0; // random high value
for(FieldStats fs: this.results.values()){ for (FieldStats fs : this.results.values()) {
if(fs.getResult()<min) { if (fs.getResult() < min) {
if (fs.getResult()>=0.0 || (fs.getResult() == -1 && fs.isCountIfUndefined())) if (fs.getResult() >= 0.0 || (fs.getResult() == -1 && fs.isCountIfUndefined()))
min = fs.getResult(); min = fs.getResult();
} }
} }
return min; return min;
} }
//if at least one is true, return 1.0 // if at least one is true, return 1.0
public double or(){ public double or() {
for (FieldStats fieldStats : this.results.values()) { for (FieldStats fieldStats : this.results.values()) {
if (fieldStats.getResult() >= fieldStats.getThreshold()) if (fieldStats.getResult() >= fieldStats.getThreshold())
return 1.0; return 1.0;
} }
return 0.0; return 0.0;
} }
//if at least one is false, return 0.0 // if at least one is false, return 0.0
public double and() { public double and() {
for (FieldStats fieldStats : this.results.values()) { for (FieldStats fieldStats : this.results.values()) {
if (fieldStats.getResult() == -1) { if (fieldStats.getResult() == -1) {
if (fieldStats.isCountIfUndefined()) if (fieldStats.isCountIfUndefined())
return 0.0; return 0.0;
} } else {
else { if (fieldStats.getResult() < fieldStats.getThreshold())
if (fieldStats.getResult() < fieldStats.getThreshold()) return 0.0;
return 0.0; }
}
} }
return 1.0; return 1.0;
} }
public double getFinalScore(AggType aggregation){ public double getFinalScore(AggType aggregation) {
switch (aggregation){ switch (aggregation) {
case AVG: case AVG:
return scoreSum()/fieldsCount(); return scoreSum() / fieldsCount();
case SUM: case SUM:
return scoreSum(); return scoreSum();
case MAX: case MAX:
return max(); return max();
case MIN: case MIN:
return min(); return min();
case W_MEAN: case W_MEAN:
return weightedScoreSum()/weightSum(); return weightedScoreSum() / weightSum();
case OR: case OR:
return or(); return or();
case AND: case AND:
return and(); return and();
default: default:
return 0.0; return 0.0;
} }
} }
} }

View File

@ -1,16 +1,17 @@
package eu.dnetlib.pace.tree.support; package eu.dnetlib.pace.tree.support;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import eu.dnetlib.pace.config.Config; import eu.dnetlib.pace.config.Config;
import eu.dnetlib.pace.model.MapDocument; import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.util.PaceException; import eu.dnetlib.pace.util.PaceException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/** /**
* The compare between two documents is given by the weighted mean of the field distances * The compare between two documents is given by the weighted mean of the field distances
*/ */
public class TreeProcessor{ public class TreeProcessor {
private static final Log log = LogFactory.getLog(TreeProcessor.class); private static final Log log = LogFactory.getLog(TreeProcessor.class);
@ -21,35 +22,34 @@ public class TreeProcessor{
} }
public boolean compare(final MapDocument a, final MapDocument b) { public boolean compare(final MapDocument a, final MapDocument b) {
//evaluate the decision tree // evaluate the decision tree
return evaluateTree(a, b).getResult() == MatchType.MATCH; return evaluateTree(a, b).getResult() == MatchType.MATCH;
} }
public TreeStats evaluateTree(final MapDocument doc1, final MapDocument doc2){ public TreeStats evaluateTree(final MapDocument doc1, final MapDocument doc2) {
TreeStats treeStats = new TreeStats(); TreeStats treeStats = new TreeStats();
String current = "start"; String current = "start";
while (MatchType.parse(current)==MatchType.UNDEFINED) { while (MatchType.parse(current) == MatchType.UNDEFINED) {
TreeNodeDef currentNode = config.decisionTree().get(current); TreeNodeDef currentNode = config.decisionTree().get(current);
//throw an exception if the node doesn't exist // throw an exception if the node doesn't exist
if (currentNode == null) if (currentNode == null)
throw new PaceException("Missing tree node: " + current); throw new PaceException("Missing tree node: " + current);
TreeNodeStats stats = currentNode.evaluate(doc1, doc2, config); TreeNodeStats stats = currentNode.evaluate(doc1, doc2, config);
treeStats.addNodeStats(current, stats); treeStats.addNodeStats(current, stats);
//if ignoreUndefined=false the miss is considered as undefined // if ignoreUndefined=false the miss is considered as undefined
if (!currentNode.isIgnoreUndefined() && stats.undefinedCount()>0) { if (!currentNode.isIgnoreUndefined() && stats.undefinedCount() > 0) {
current = currentNode.getUndefined(); current = currentNode.getUndefined();
} }
//if ignoreUndefined=true the miss is ignored and the score computed anyway // if ignoreUndefined=true the miss is ignored and the score computed anyway
else if (stats.getFinalScore(currentNode.getAggregation()) >= currentNode.getThreshold()) { else if (stats.getFinalScore(currentNode.getAggregation()) >= currentNode.getThreshold()) {
current = currentNode.getPositive(); current = currentNode.getPositive();
} } else {
else {
current = currentNode.getNegative(); current = currentNode.getNegative();
} }
@ -63,25 +63,24 @@ public class TreeProcessor{
String current = "start"; String current = "start";
double score = 0.0; double score = 0.0;
while (MatchType.parse(current)==MatchType.UNDEFINED) { while (MatchType.parse(current) == MatchType.UNDEFINED) {
TreeNodeDef currentNode = config.decisionTree().get(current); TreeNodeDef currentNode = config.decisionTree().get(current);
//throw an exception if the node doesn't exist // throw an exception if the node doesn't exist
if (currentNode == null) if (currentNode == null)
throw new PaceException("The Tree Node doesn't exist: " + current); throw new PaceException("The Tree Node doesn't exist: " + current);
TreeNodeStats stats = currentNode.evaluate(doc1, doc2, config); TreeNodeStats stats = currentNode.evaluate(doc1, doc2, config);
score = stats.getFinalScore(currentNode.getAggregation()); score = stats.getFinalScore(currentNode.getAggregation());
//if ignoreUndefined=false the miss is considered as undefined // if ignoreUndefined=false the miss is considered as undefined
if (!currentNode.isIgnoreUndefined() && stats.undefinedCount()>0) { if (!currentNode.isIgnoreUndefined() && stats.undefinedCount() > 0) {
current = currentNode.getUndefined(); current = currentNode.getUndefined();
} }
//if ignoreUndefined=true the miss is ignored and the score computed anyway // if ignoreUndefined=true the miss is ignored and the score computed anyway
else if (stats.getFinalScore(currentNode.getAggregation()) >= currentNode.getThreshold()) { else if (stats.getFinalScore(currentNode.getAggregation()) >= currentNode.getThreshold()) {
current = currentNode.getPositive(); current = currentNode.getPositive();
} } else {
else {
current = currentNode.getNegative(); current = currentNode.getNegative();
} }

View File

@ -1,51 +1,52 @@
package eu.dnetlib.pace.tree.support;
import eu.dnetlib.pace.util.PaceException; package eu.dnetlib.pace.tree.support;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException; import java.io.IOException;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import eu.dnetlib.pace.util.PaceException;
public class TreeStats { public class TreeStats {
//<layer_id, <field:comparator, result>> // <layer_id, <field:comparator, result>>
Map<String, TreeNodeStats> stats; Map<String, TreeNodeStats> stats;
MatchType result; MatchType result;
public TreeStats(){ public TreeStats() {
this.stats = new HashMap<>(); this.stats = new HashMap<>();
this.result = MatchType.NO_MATCH; this.result = MatchType.NO_MATCH;
} }
public MatchType getResult(){ public MatchType getResult() {
return this.result; return this.result;
} }
public void setResult(MatchType result){ public void setResult(MatchType result) {
this.result = result; this.result = result;
} }
public Map<String, TreeNodeStats> getStats() { public Map<String, TreeNodeStats> getStats() {
return stats; return stats;
} }
public void setStats(Map<String, TreeNodeStats> stats) { public void setStats(Map<String, TreeNodeStats> stats) {
this.stats = stats; this.stats = stats;
} }
public void addNodeStats(String layerID, TreeNodeStats treeNodeStats){ public void addNodeStats(String layerID, TreeNodeStats treeNodeStats) {
this.stats.put(layerID, treeNodeStats); this.stats.put(layerID, treeNodeStats);
} }
@Override
public String toString(){
try {
return new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
} catch (IOException e) {
throw new PaceException("Impossible to convert to JSON: ", e);
}
}
@Override
public String toString() {
try {
return new ObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(this);
} catch (IOException e) {
throw new PaceException("Impossible to convert to JSON: ", e);
}
}
} }

View File

@ -1,192 +1,217 @@
package eu.dnetlib.pace.util; package eu.dnetlib.pace.util;
import com.google.common.collect.Lists; import java.util.*;
import eu.dnetlib.pace.clustering.NGramUtils;
import eu.dnetlib.pace.config.DedupConfig;
import eu.dnetlib.pace.config.WfConfig;
import eu.dnetlib.pace.tree.support.TreeProcessor;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.model.MapDocumentComparator;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import java.util.*; import com.google.common.collect.Lists;
import eu.dnetlib.pace.clustering.NGramUtils;
import eu.dnetlib.pace.config.DedupConfig;
import eu.dnetlib.pace.config.WfConfig;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.model.MapDocumentComparator;
import eu.dnetlib.pace.tree.support.TreeProcessor;
public class BlockProcessor { public class BlockProcessor {
public static final List<String> accumulators= new ArrayList<>(); public static final List<String> accumulators = new ArrayList<>();
private static final Log log = LogFactory.getLog(BlockProcessor.class); private static final Log log = LogFactory.getLog(BlockProcessor.class);
private DedupConfig dedupConf; private DedupConfig dedupConf;
public static void constructAccumulator( final DedupConfig dedupConf) { public static void constructAccumulator(final DedupConfig dedupConf) {
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "records per hash key = 1")); accumulators.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "records per hash key = 1"));
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField())); accumulators
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), String.format("Skipped records for count(%s) >= %s", dedupConf.getWf().getOrderField(), dedupConf.getWf().getGroupMaxSize()))); .add(
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "skip list")); String
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "dedupSimilarity (x2)")); .format(
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold())); "%s::%s", dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField()));
} accumulators
.add(
String
.format(
"%s::%s", dedupConf.getWf().getEntityType(),
String
.format(
"Skipped records for count(%s) >= %s", dedupConf.getWf().getOrderField(),
dedupConf.getWf().getGroupMaxSize())));
accumulators.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "skip list"));
accumulators.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "dedupSimilarity (x2)"));
accumulators
.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold()));
}
public BlockProcessor(DedupConfig dedupConf) { public BlockProcessor(DedupConfig dedupConf) {
this.dedupConf = dedupConf; this.dedupConf = dedupConf;
} }
public void processSortedBlock(final String key, final List<MapDocument> documents, final Reporter context) { public void processSortedBlock(final String key, final List<MapDocument> documents, final Reporter context) {
if (documents.size() > 1) { if (documents.size() > 1) {
// log.info("reducing key: '" + key + "' records: " + q.size()); // log.info("reducing key: '" + key + "' records: " + q.size());
process(prepare(documents), context); process(prepare(documents), context);
} else { } else {
context.incrementCounter(dedupConf.getWf().getEntityType(), "records per hash key = 1", 1); context.incrementCounter(dedupConf.getWf().getEntityType(), "records per hash key = 1", 1);
} }
} }
public void process(final String key, final Iterable<MapDocument> documents, final Reporter context) { public void process(final String key, final Iterable<MapDocument> documents, final Reporter context) {
final Queue<MapDocument> q = prepare(documents); final Queue<MapDocument> q = prepare(documents);
if (q.size() > 1) { if (q.size() > 1) {
// log.info("reducing key: '" + key + "' records: " + q.size()); // log.info("reducing key: '" + key + "' records: " + q.size());
process(simplifyQueue(q, key, context), context); process(simplifyQueue(q, key, context), context);
} else { } else {
context.incrementCounter(dedupConf.getWf().getEntityType(), "records per hash key = 1", 1); context.incrementCounter(dedupConf.getWf().getEntityType(), "records per hash key = 1", 1);
} }
} }
private Queue<MapDocument> prepare(final Iterable<MapDocument> documents) { private Queue<MapDocument> prepare(final Iterable<MapDocument> documents) {
final Queue<MapDocument> queue = new PriorityQueue<>(100, new MapDocumentComparator(dedupConf.getWf().getOrderField())); final Queue<MapDocument> queue = new PriorityQueue<>(100,
new MapDocumentComparator(dedupConf.getWf().getOrderField()));
final Set<String> seen = new HashSet<String>(); final Set<String> seen = new HashSet<String>();
final int queueMaxSize = dedupConf.getWf().getQueueMaxSize(); final int queueMaxSize = dedupConf.getWf().getQueueMaxSize();
documents.forEach(doc -> { documents.forEach(doc -> {
if (queue.size() <= queueMaxSize) { if (queue.size() <= queueMaxSize) {
final String id = doc.getIdentifier(); final String id = doc.getIdentifier();
if (!seen.contains(id)) { if (!seen.contains(id)) {
seen.add(id); seen.add(id);
queue.add(doc); queue.add(doc);
} }
} }
}); });
return queue; return queue;
} }
private Queue<MapDocument> simplifyQueue(final Queue<MapDocument> queue, final String ngram, final Reporter context) { private Queue<MapDocument> simplifyQueue(final Queue<MapDocument> queue, final String ngram,
final Queue<MapDocument> q = new LinkedList<>(); final Reporter context) {
final Queue<MapDocument> q = new LinkedList<>();
String fieldRef = ""; String fieldRef = "";
final List<MapDocument> tempResults = Lists.newArrayList(); final List<MapDocument> tempResults = Lists.newArrayList();
while (!queue.isEmpty()) { while (!queue.isEmpty()) {
final MapDocument result = queue.remove(); final MapDocument result = queue.remove();
final String orderFieldName = dedupConf.getWf().getOrderField(); final String orderFieldName = dedupConf.getWf().getOrderField();
final Field orderFieldValue = result.values(orderFieldName); final Field orderFieldValue = result.values(orderFieldName);
if (!orderFieldValue.isEmpty()) { if (!orderFieldValue.isEmpty()) {
final String field = NGramUtils.cleanupForOrdering(orderFieldValue.stringValue()); final String field = NGramUtils.cleanupForOrdering(orderFieldValue.stringValue());
if (field.equals(fieldRef)) { if (field.equals(fieldRef)) {
tempResults.add(result); tempResults.add(result);
} else { } else {
populateSimplifiedQueue(q, tempResults, context, fieldRef, ngram); populateSimplifiedQueue(q, tempResults, context, fieldRef, ngram);
tempResults.clear(); tempResults.clear();
tempResults.add(result); tempResults.add(result);
fieldRef = field; fieldRef = field;
} }
} else { } else {
context.incrementCounter(dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField(), 1); context
} .incrementCounter(
} dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField(), 1);
populateSimplifiedQueue(q, tempResults, context, fieldRef, ngram); }
}
populateSimplifiedQueue(q, tempResults, context, fieldRef, ngram);
return q; return q;
} }
private void populateSimplifiedQueue(final Queue<MapDocument> q, private void populateSimplifiedQueue(final Queue<MapDocument> q,
final List<MapDocument> tempResults, final List<MapDocument> tempResults,
final Reporter context, final Reporter context,
final String fieldRef, final String fieldRef,
final String ngram) { final String ngram) {
WfConfig wf = dedupConf.getWf(); WfConfig wf = dedupConf.getWf();
if (tempResults.size() < wf.getGroupMaxSize()) { if (tempResults.size() < wf.getGroupMaxSize()) {
q.addAll(tempResults); q.addAll(tempResults);
} else { } else {
context.incrementCounter(wf.getEntityType(), String.format("Skipped records for count(%s) >= %s", wf.getOrderField(), wf.getGroupMaxSize()), tempResults.size()); context
.incrementCounter(
wf.getEntityType(),
String.format("Skipped records for count(%s) >= %s", wf.getOrderField(), wf.getGroupMaxSize()),
tempResults.size());
// log.info("Skipped field: " + fieldRef + " - size: " + tempResults.size() + " - ngram: " + ngram); // log.info("Skipped field: " + fieldRef + " - size: " + tempResults.size() + " - ngram: " + ngram);
} }
} }
private void process(final Queue<MapDocument> queue, final Reporter context) { private void process(final Queue<MapDocument> queue, final Reporter context) {
while (!queue.isEmpty()) { while (!queue.isEmpty()) {
final MapDocument pivot = queue.remove(); final MapDocument pivot = queue.remove();
final String idPivot = pivot.getIdentifier(); final String idPivot = pivot.getIdentifier();
WfConfig wf = dedupConf.getWf(); WfConfig wf = dedupConf.getWf();
final Field fieldsPivot = pivot.values(wf.getOrderField()); final Field fieldsPivot = pivot.values(wf.getOrderField());
final String fieldPivot = (fieldsPivot == null) || fieldsPivot.isEmpty() ? "" : fieldsPivot.stringValue(); final String fieldPivot = (fieldsPivot == null) || fieldsPivot.isEmpty() ? "" : fieldsPivot.stringValue();
if (fieldPivot != null) { if (fieldPivot != null) {
int i = 0; int i = 0;
for (final MapDocument curr : queue) { for (final MapDocument curr : queue) {
final String idCurr = curr.getIdentifier(); final String idCurr = curr.getIdentifier();
if (mustSkip(idCurr)) { if (mustSkip(idCurr)) {
context.incrementCounter(wf.getEntityType(), "skip list", 1); context.incrementCounter(wf.getEntityType(), "skip list", 1);
break; break;
} }
if (i > wf.getSlidingWindowSize()) { if (i > wf.getSlidingWindowSize()) {
break; break;
} }
final Field fieldsCurr = curr.values(wf.getOrderField()); final Field fieldsCurr = curr.values(wf.getOrderField());
final String fieldCurr = (fieldsCurr == null) || fieldsCurr.isEmpty() ? null : fieldsCurr.stringValue(); final String fieldCurr = (fieldsCurr == null) || fieldsCurr.isEmpty() ? null
: fieldsCurr.stringValue();
if (!idCurr.equals(idPivot) && (fieldCurr != null)) { if (!idCurr.equals(idPivot) && (fieldCurr != null)) {
final TreeProcessor treeProcessor = new TreeProcessor(dedupConf); final TreeProcessor treeProcessor = new TreeProcessor(dedupConf);
emitOutput(treeProcessor.compare(pivot, curr), idPivot, idCurr, context); emitOutput(treeProcessor.compare(pivot, curr), idPivot, idCurr, context);
} }
} }
} }
} }
} }
private void emitOutput(final boolean result, final String idPivot, final String idCurr, final Reporter context) { private void emitOutput(final boolean result, final String idPivot, final String idCurr, final Reporter context) {
if (result) { if (result) {
writeSimilarity(context, idPivot, idCurr); writeSimilarity(context, idPivot, idCurr);
context.incrementCounter(dedupConf.getWf().getEntityType(), "dedupSimilarity (x2)", 1); context.incrementCounter(dedupConf.getWf().getEntityType(), "dedupSimilarity (x2)", 1);
} else { } else {
context.incrementCounter(dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold(), 1); context.incrementCounter(dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold(), 1);
} }
} }
private boolean mustSkip(final String idPivot) { private boolean mustSkip(final String idPivot) {
return dedupConf.getWf().getSkipList().contains(getNsPrefix(idPivot)); return dedupConf.getWf().getSkipList().contains(getNsPrefix(idPivot));
} }
private String getNsPrefix(final String id) { private String getNsPrefix(final String id) {
return StringUtils.substringBetween(id, "|", "::"); return StringUtils.substringBetween(id, "|", "::");
} }
private void writeSimilarity(final Reporter context, final String from, final String to) { private void writeSimilarity(final Reporter context, final String from, final String to) {
final String type = dedupConf.getWf().getEntityType(); final String type = dedupConf.getWf().getEntityType();
context.emit(type, from, to); context.emit(type, from, to);
context.emit(type, to, from); context.emit(type, to, from);
} }
} }

View File

@ -1,6 +1,14 @@
package eu.dnetlib.pace.util; package eu.dnetlib.pace.util;
import java.util.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import eu.dnetlib.pace.clustering.NGramUtils; import eu.dnetlib.pace.clustering.NGramUtils;
import eu.dnetlib.pace.config.DedupConfig; import eu.dnetlib.pace.config.DedupConfig;
import eu.dnetlib.pace.config.WfConfig; import eu.dnetlib.pace.config.WfConfig;
@ -9,240 +17,260 @@ import eu.dnetlib.pace.model.MapDocument;
import eu.dnetlib.pace.model.MapDocumentComparator; import eu.dnetlib.pace.model.MapDocumentComparator;
import eu.dnetlib.pace.tree.*; import eu.dnetlib.pace.tree.*;
import eu.dnetlib.pace.tree.support.TreeProcessor; import eu.dnetlib.pace.tree.support.TreeProcessor;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.*;
public class BlockProcessorForTesting { public class BlockProcessorForTesting {
public static final List<String> accumulators= new ArrayList<>(); public static final List<String> accumulators = new ArrayList<>();
private static final Log log = LogFactory.getLog(eu.dnetlib.pace.util.BlockProcessorForTesting.class); private static final Log log = LogFactory.getLog(eu.dnetlib.pace.util.BlockProcessorForTesting.class);
private DedupConfig dedupConf; private DedupConfig dedupConf;
public static void constructAccumulator( final DedupConfig dedupConf) { public static void constructAccumulator(final DedupConfig dedupConf) {
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "records per hash key = 1")); accumulators.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "records per hash key = 1"));
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField())); accumulators
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), String.format("Skipped records for count(%s) >= %s", dedupConf.getWf().getOrderField(), dedupConf.getWf().getGroupMaxSize()))); .add(
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "skip list")); String
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "dedupSimilarity (x2)")); .format(
accumulators.add(String.format("%s::%s",dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold())); "%s::%s", dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField()));
} accumulators
.add(
String
.format(
"%s::%s", dedupConf.getWf().getEntityType(),
String
.format(
"Skipped records for count(%s) >= %s", dedupConf.getWf().getOrderField(),
dedupConf.getWf().getGroupMaxSize())));
accumulators.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "skip list"));
accumulators.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "dedupSimilarity (x2)"));
accumulators
.add(String.format("%s::%s", dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold()));
}
public BlockProcessorForTesting(DedupConfig dedupConf) { public BlockProcessorForTesting(DedupConfig dedupConf) {
this.dedupConf = dedupConf; this.dedupConf = dedupConf;
} }
public void processSortedBlock(final String key, final List<MapDocument> documents, final Reporter context, boolean useTree, boolean noMatch) { public void processSortedBlock(final String key, final List<MapDocument> documents, final Reporter context,
if (documents.size() > 1) { boolean useTree, boolean noMatch) {
if (documents.size() > 1) {
// log.info("reducing key: '" + key + "' records: " + q.size()); // log.info("reducing key: '" + key + "' records: " + q.size());
process(prepare(documents), context, useTree, noMatch); process(prepare(documents), context, useTree, noMatch);
} else { } else {
context.incrementCounter(dedupConf.getWf().getEntityType(), "records per hash key = 1", 1); context.incrementCounter(dedupConf.getWf().getEntityType(), "records per hash key = 1", 1);
} }
} }
public void process(final String key, final Iterable<MapDocument> documents, final Reporter context, boolean useTree, boolean noMatch) { public void process(final String key, final Iterable<MapDocument> documents, final Reporter context,
boolean useTree, boolean noMatch) {
final Queue<MapDocument> q = prepare(documents); final Queue<MapDocument> q = prepare(documents);
if (q.size() > 1) { if (q.size() > 1) {
// log.info("reducing key: '" + key + "' records: " + q.size()); // log.info("reducing key: '" + key + "' records: " + q.size());
process(simplifyQueue(q, key, context), context, useTree, noMatch); process(simplifyQueue(q, key, context), context, useTree, noMatch);
} else { } else {
context.incrementCounter(dedupConf.getWf().getEntityType(), "records per hash key = 1", 1); context.incrementCounter(dedupConf.getWf().getEntityType(), "records per hash key = 1", 1);
} }
} }
private Queue<MapDocument> prepare(final Iterable<MapDocument> documents) { private Queue<MapDocument> prepare(final Iterable<MapDocument> documents) {
final Queue<MapDocument> queue = new PriorityQueue<>(100, new MapDocumentComparator(dedupConf.getWf().getOrderField())); final Queue<MapDocument> queue = new PriorityQueue<>(100,
new MapDocumentComparator(dedupConf.getWf().getOrderField()));
final Set<String> seen = new HashSet<String>(); final Set<String> seen = new HashSet<String>();
final int queueMaxSize = dedupConf.getWf().getQueueMaxSize(); final int queueMaxSize = dedupConf.getWf().getQueueMaxSize();
documents.forEach(doc -> { documents.forEach(doc -> {
if (queue.size() <= queueMaxSize) { if (queue.size() <= queueMaxSize) {
final String id = doc.getIdentifier(); final String id = doc.getIdentifier();
if (!seen.contains(id)) { if (!seen.contains(id)) {
seen.add(id); seen.add(id);
queue.add(doc); queue.add(doc);
} }
} }
}); });
return queue; return queue;
} }
private Queue<MapDocument> simplifyQueue(final Queue<MapDocument> queue, final String ngram, final Reporter context) { private Queue<MapDocument> simplifyQueue(final Queue<MapDocument> queue, final String ngram,
final Queue<MapDocument> q = new LinkedList<>(); final Reporter context) {
final Queue<MapDocument> q = new LinkedList<>();
String fieldRef = ""; String fieldRef = "";
final List<MapDocument> tempResults = Lists.newArrayList(); final List<MapDocument> tempResults = Lists.newArrayList();
while (!queue.isEmpty()) { while (!queue.isEmpty()) {
final MapDocument result = queue.remove(); final MapDocument result = queue.remove();
final String orderFieldName = dedupConf.getWf().getOrderField(); final String orderFieldName = dedupConf.getWf().getOrderField();
final Field orderFieldValue = result.values(orderFieldName); final Field orderFieldValue = result.values(orderFieldName);
if (!orderFieldValue.isEmpty()) { if (!orderFieldValue.isEmpty()) {
final String field = NGramUtils.cleanupForOrdering(orderFieldValue.stringValue()); final String field = NGramUtils.cleanupForOrdering(orderFieldValue.stringValue());
if (field.equals(fieldRef)) { if (field.equals(fieldRef)) {
tempResults.add(result); tempResults.add(result);
} else { } else {
populateSimplifiedQueue(q, tempResults, context, fieldRef, ngram); populateSimplifiedQueue(q, tempResults, context, fieldRef, ngram);
tempResults.clear(); tempResults.clear();
tempResults.add(result); tempResults.add(result);
fieldRef = field; fieldRef = field;
} }
} else { } else {
context.incrementCounter(dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField(), 1); context
} .incrementCounter(
} dedupConf.getWf().getEntityType(), "missing " + dedupConf.getWf().getOrderField(), 1);
populateSimplifiedQueue(q, tempResults, context, fieldRef, ngram); }
}
populateSimplifiedQueue(q, tempResults, context, fieldRef, ngram);
return q; return q;
} }
private void populateSimplifiedQueue(final Queue<MapDocument> q, private void populateSimplifiedQueue(final Queue<MapDocument> q,
final List<MapDocument> tempResults, final List<MapDocument> tempResults,
final Reporter context, final Reporter context,
final String fieldRef, final String fieldRef,
final String ngram) { final String ngram) {
WfConfig wf = dedupConf.getWf(); WfConfig wf = dedupConf.getWf();
if (tempResults.size() < wf.getGroupMaxSize()) { if (tempResults.size() < wf.getGroupMaxSize()) {
q.addAll(tempResults); q.addAll(tempResults);
} else { } else {
context.incrementCounter(wf.getEntityType(), String.format("Skipped records for count(%s) >= %s", wf.getOrderField(), wf.getGroupMaxSize()), tempResults.size()); context
.incrementCounter(
wf.getEntityType(),
String.format("Skipped records for count(%s) >= %s", wf.getOrderField(), wf.getGroupMaxSize()),
tempResults.size());
// log.info("Skipped field: " + fieldRef + " - size: " + tempResults.size() + " - ngram: " + ngram); // log.info("Skipped field: " + fieldRef + " - size: " + tempResults.size() + " - ngram: " + ngram);
} }
} }
private void process(final Queue<MapDocument> queue, final Reporter context, boolean useTree, boolean noMatch) { private void process(final Queue<MapDocument> queue, final Reporter context, boolean useTree, boolean noMatch) {
while (!queue.isEmpty()) { while (!queue.isEmpty()) {
final MapDocument pivot = queue.remove(); final MapDocument pivot = queue.remove();
final String idPivot = pivot.getIdentifier(); final String idPivot = pivot.getIdentifier();
WfConfig wf = dedupConf.getWf(); WfConfig wf = dedupConf.getWf();
final Field fieldsPivot = pivot.values(wf.getOrderField()); final Field fieldsPivot = pivot.values(wf.getOrderField());
final String fieldPivot = (fieldsPivot == null) || fieldsPivot.isEmpty() ? "" : fieldsPivot.stringValue(); final String fieldPivot = (fieldsPivot == null) || fieldsPivot.isEmpty() ? "" : fieldsPivot.stringValue();
if (fieldPivot != null) { if (fieldPivot != null) {
int i = 0; int i = 0;
for (final MapDocument curr : queue) { for (final MapDocument curr : queue) {
final String idCurr = curr.getIdentifier(); final String idCurr = curr.getIdentifier();
if (mustSkip(idCurr)) { if (mustSkip(idCurr)) {
context.incrementCounter(wf.getEntityType(), "skip list", 1); context.incrementCounter(wf.getEntityType(), "skip list", 1);
break; break;
} }
if (i > wf.getSlidingWindowSize()) { if (i > wf.getSlidingWindowSize()) {
break; break;
} }
final Field fieldsCurr = curr.values(wf.getOrderField()); final Field fieldsCurr = curr.values(wf.getOrderField());
final String fieldCurr = (fieldsCurr == null) || fieldsCurr.isEmpty() ? null : fieldsCurr.stringValue(); final String fieldCurr = (fieldsCurr == null) || fieldsCurr.isEmpty() ? null
: fieldsCurr.stringValue();
if (!idCurr.equals(idPivot) && (fieldCurr != null)) { if (!idCurr.equals(idPivot) && (fieldCurr != null)) {
//draws no match relations (test purpose) // draws no match relations (test purpose)
if (noMatch) { if (noMatch) {
emitOutput(!new TreeProcessor(dedupConf).compare(pivot, curr), idPivot, idCurr, context); emitOutput(!new TreeProcessor(dedupConf).compare(pivot, curr), idPivot, idCurr, context);
} } else {
else { // use the decision tree implementation or the "normal" implementation of the similarity
//use the decision tree implementation or the "normal" implementation of the similarity score (valid only for publications) // score (valid only for publications)
if (useTree) if (useTree)
emitOutput(new TreeProcessor(dedupConf).compare(pivot, curr), idPivot, idCurr, context); emitOutput(new TreeProcessor(dedupConf).compare(pivot, curr), idPivot, idCurr, context);
else else
emitOutput(publicationCompare(pivot, curr, dedupConf), idPivot, idCurr, context); emitOutput(publicationCompare(pivot, curr, dedupConf), idPivot, idCurr, context);
} }
// if(new TreeProcessor(dedupConf).compare(pivot, curr) != publicationCompare(pivot, curr, dedupConf)) { // if(new TreeProcessor(dedupConf).compare(pivot, curr) != publicationCompare(pivot, curr, dedupConf)) {
// emitOutput(true, idPivot, idCurr, context); // emitOutput(true, idPivot, idCurr, context);
// } // }
} }
} }
} }
} }
} }
protected static boolean compareInstanceType(MapDocument a, MapDocument b, DedupConfig conf) { protected static boolean compareInstanceType(MapDocument a, MapDocument b, DedupConfig conf) {
Map<String, String> params = new HashMap<>(); Map<String, String> params = new HashMap<>();
InstanceTypeMatch instanceTypeMatch = new InstanceTypeMatch(params); InstanceTypeMatch instanceTypeMatch = new InstanceTypeMatch(params);
double compare = instanceTypeMatch.compare(a.getFieldMap().get("instance"), b.getFieldMap().get("instance"), conf); double compare = instanceTypeMatch
return compare>=1.0; .compare(a.getFieldMap().get("instance"), b.getFieldMap().get("instance"), conf);
} return compare >= 1.0;
}
private boolean publicationCompare(MapDocument a, MapDocument b, DedupConfig config) { private boolean publicationCompare(MapDocument a, MapDocument b, DedupConfig config) {
//if the score gives 1, the publications are equivalent // if the score gives 1, the publications are equivalent
Map<String, String> params = new HashMap<>(); Map<String, String> params = new HashMap<>();
params.put("jpath_value", "$.value"); params.put("jpath_value", "$.value");
params.put("jpath_classid", "$.qualifier.classid"); params.put("jpath_classid", "$.qualifier.classid");
params.put("mode", "count"); params.put("mode", "count");
double score = 0.0; double score = 0.0;
//levenstein title // levenstein title
LevensteinTitle levensteinTitle = new LevensteinTitle(params); LevensteinTitle levensteinTitle = new LevensteinTitle(params);
if(levensteinTitle.compare(a.getFieldMap().get("title"), b.getFieldMap().get("title"), config) >= 0.9) { if (levensteinTitle.compare(a.getFieldMap().get("title"), b.getFieldMap().get("title"), config) >= 0.9) {
score += 0.2; score += 0.2;
} }
//pid // pid
JsonListMatch jsonListMatch = new JsonListMatch(params); JsonListMatch jsonListMatch = new JsonListMatch(params);
if (jsonListMatch.compare(a.getFieldMap().get("pid"), b.getFieldMap().get("pid"), config) >= 1.0) { if (jsonListMatch.compare(a.getFieldMap().get("pid"), b.getFieldMap().get("pid"), config) >= 1.0) {
score += 0.5; score += 0.5;
} }
//title version // title version
TitleVersionMatch titleVersionMatch = new TitleVersionMatch(params); TitleVersionMatch titleVersionMatch = new TitleVersionMatch(params);
double result1 = titleVersionMatch.compare(a.getFieldMap().get("title"), b.getFieldMap().get("title"), config); double result1 = titleVersionMatch.compare(a.getFieldMap().get("title"), b.getFieldMap().get("title"), config);
if(result1<0 || result1>=1.0) { if (result1 < 0 || result1 >= 1.0) {
score += 0.1; score += 0.1;
} }
//authors match // authors match
params.remove("mode"); params.remove("mode");
AuthorsMatch authorsMatch = new AuthorsMatch(params); AuthorsMatch authorsMatch = new AuthorsMatch(params);
double result2 = authorsMatch.compare(a.getFieldMap().get("authors"), b.getFieldMap().get("authors"), config); double result2 = authorsMatch.compare(a.getFieldMap().get("authors"), b.getFieldMap().get("authors"), config);
if(result2 <0|| result2>=0.6) { if (result2 < 0 || result2 >= 0.6) {
score += 0.2; score += 0.2;
} }
return score>=0.5; return score >= 0.5;
} }
private void emitOutput(final boolean result, final String idPivot, final String idCurr, final Reporter context) { private void emitOutput(final boolean result, final String idPivot, final String idCurr, final Reporter context) {
if (result) { if (result) {
writeSimilarity(context, idPivot, idCurr); writeSimilarity(context, idPivot, idCurr);
context.incrementCounter(dedupConf.getWf().getEntityType(), "dedupSimilarity (x2)", 1); context.incrementCounter(dedupConf.getWf().getEntityType(), "dedupSimilarity (x2)", 1);
} else { } else {
context.incrementCounter(dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold(), 1); context.incrementCounter(dedupConf.getWf().getEntityType(), "d < " + dedupConf.getWf().getThreshold(), 1);
} }
} }
private boolean mustSkip(final String idPivot) { private boolean mustSkip(final String idPivot) {
return dedupConf.getWf().getSkipList().contains(getNsPrefix(idPivot)); return dedupConf.getWf().getSkipList().contains(getNsPrefix(idPivot));
} }
private String getNsPrefix(final String id) { private String getNsPrefix(final String id) {
return StringUtils.substringBetween(id, "|", "::"); return StringUtils.substringBetween(id, "|", "::");
} }
private void writeSimilarity(final Reporter context, final String from, final String to) { private void writeSimilarity(final Reporter context, final String from, final String to) {
final String type = dedupConf.getWf().getEntityType(); final String type = dedupConf.getWf().getEntityType();
context.emit(type, from, to); context.emit(type, from, to);
} }
} }

View File

@ -1,15 +1,18 @@
package eu.dnetlib.pace.util; package eu.dnetlib.pace.util;
import org.apache.commons.lang3.text.WordUtils;
import com.google.common.base.Function; import com.google.common.base.Function;
import org.apache.commons.lang3.text.WordUtils;
public class Capitalise implements Function<String, String> { public class Capitalise implements Function<String, String> {
private final char[] DELIM = {' ', '-'}; private final char[] DELIM = {
' ', '-'
};
@Override @Override
public String apply(final String s) { public String apply(final String s) {
return WordUtils.capitalize(s.toLowerCase(), DELIM); return WordUtils.capitalize(s.toLowerCase(), DELIM);
} }
}; };

View File

@ -1,3 +1,4 @@
package eu.dnetlib.pace.util; package eu.dnetlib.pace.util;
import com.google.common.base.Function; import com.google.common.base.Function;
@ -7,4 +8,4 @@ public class DotAbbreviations implements Function<String, String> {
public String apply(String s) { public String apply(String s) {
return s.length() == 1 ? s + "." : s; return s.length() == 1 ? s + "." : s;
} }
}; };

View File

@ -1,166 +1,177 @@
package eu.dnetlib.pace.util;
import com.fasterxml.jackson.core.JsonProcessingException; package eu.dnetlib.pace.util;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.jayway.jsonpath.Configuration;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.Option;
import eu.dnetlib.pace.config.DedupConfig;
import eu.dnetlib.pace.config.Type;
import eu.dnetlib.pace.model.*;
import net.minidev.json.JSONArray;
import java.math.BigDecimal; import java.math.BigDecimal;
import java.util.*; import java.util.*;
import java.util.function.Predicate; import java.util.function.Predicate;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.jayway.jsonpath.Configuration;
import com.jayway.jsonpath.JsonPath;
import com.jayway.jsonpath.Option;
import eu.dnetlib.pace.config.DedupConfig;
import eu.dnetlib.pace.config.Type;
import eu.dnetlib.pace.model.*;
import net.minidev.json.JSONArray;
public class MapDocumentUtil { public class MapDocumentUtil {
public static final String URL_REGEX = "^(http|https|ftp)\\://.*"; public static final String URL_REGEX = "^(http|https|ftp)\\://.*";
public static Predicate<String> urlFilter = s -> s.trim().matches(URL_REGEX); public static Predicate<String> urlFilter = s -> s.trim().matches(URL_REGEX);
public static MapDocument asMapDocumentWithJPath(DedupConfig conf, final String json) { public static MapDocument asMapDocumentWithJPath(DedupConfig conf, final String json) {
MapDocument m = new MapDocument(); MapDocument m = new MapDocument();
m.setIdentifier(getJPathString(conf.getWf().getIdPath(), json)); m.setIdentifier(getJPathString(conf.getWf().getIdPath(), json));
Map<String, Field> stringField = new HashMap<>(); Map<String, Field> stringField = new HashMap<>();
conf.getPace().getModel().forEach(fdef -> { conf.getPace().getModel().forEach(fdef -> {
switch (fdef.getType()) { switch (fdef.getType()) {
case String: case String:
case Int: case Int:
stringField.put(fdef.getName(), new FieldValueImpl(fdef.getType(), fdef.getName(), truncateValue(getJPathString(fdef.getPath(), json), fdef.getLength()))); stringField
break; .put(
case URL: fdef.getName(), new FieldValueImpl(fdef.getType(), fdef.getName(),
String uv = getJPathString(fdef.getPath(), json); truncateValue(getJPathString(fdef.getPath(), json), fdef.getLength())));
if (!urlFilter.test(uv)) uv = ""; break;
stringField.put(fdef.getName(), new FieldValueImpl(fdef.getType(), fdef.getName(), uv)); case URL:
break; String uv = getJPathString(fdef.getPath(), json);
case List: if (!urlFilter.test(uv))
case JSON: uv = "";
FieldListImpl fi = new FieldListImpl(fdef.getName(), fdef.getType()); stringField.put(fdef.getName(), new FieldValueImpl(fdef.getType(), fdef.getName(), uv));
truncateList(getJPathList(fdef.getPath(), json, fdef.getType()), fdef.getSize()) break;
.stream() case List:
.map(item -> new FieldValueImpl(Type.String, fdef.getName(), item)) case JSON:
.forEach(fi::add); FieldListImpl fi = new FieldListImpl(fdef.getName(), fdef.getType());
stringField.put(fdef.getName(), fi); truncateList(getJPathList(fdef.getPath(), json, fdef.getType()), fdef.getSize())
break; .stream()
case DoubleArray: .map(item -> new FieldValueImpl(Type.String, fdef.getName(), item))
stringField.put( .forEach(fi::add);
fdef.getName(), stringField.put(fdef.getName(), fi);
new FieldValueImpl(Type.DoubleArray, break;
fdef.getName(), case DoubleArray:
getJPathArray(fdef.getPath(), json)) stringField
); .put(
break; fdef.getName(),
case StringConcat: new FieldValueImpl(Type.DoubleArray,
String[] jpaths = fdef.getPath().split("\\|\\|\\|"); fdef.getName(),
stringField.put( getJPathArray(fdef.getPath(), json)));
fdef.getName(), break;
new FieldValueImpl(Type.String, case StringConcat:
fdef.getName(), String[] jpaths = fdef.getPath().split("\\|\\|\\|");
truncateValue(Arrays.stream(jpaths).map(jpath -> getJPathString(jpath, json)).collect(Collectors.joining(" ")), stringField
fdef.getLength()) .put(
) fdef.getName(),
); new FieldValueImpl(Type.String,
break; fdef.getName(),
} truncateValue(
}); Arrays
m.setFieldMap(stringField); .stream(jpaths)
return m; .map(jpath -> getJPathString(jpath, json))
} .collect(Collectors.joining(" ")),
fdef.getLength())));
break;
}
});
m.setFieldMap(stringField);
return m;
}
public static List<String> getJPathList(String path, String json, Type type) { public static List<String> getJPathList(String path, String json, Type type) {
if (type == Type.List) if (type == Type.List)
return JsonPath.using(Configuration.defaultConfiguration().addOptions(Option.ALWAYS_RETURN_LIST, Option.SUPPRESS_EXCEPTIONS)).parse(json).read(path); return JsonPath
Object jresult; .using(
List<String> result = new ArrayList<>(); Configuration
try { .defaultConfiguration()
jresult = JsonPath.read(json, path); .addOptions(Option.ALWAYS_RETURN_LIST, Option.SUPPRESS_EXCEPTIONS))
} catch (Throwable e) { .parse(json)
return result; .read(path);
} Object jresult;
if (jresult instanceof JSONArray) { List<String> result = new ArrayList<>();
try {
jresult = JsonPath.read(json, path);
} catch (Throwable e) {
return result;
}
if (jresult instanceof JSONArray) {
((JSONArray) jresult).forEach(it -> { ((JSONArray) jresult).forEach(it -> {
try { try {
result.add(new ObjectMapper().writeValueAsString(it)); result.add(new ObjectMapper().writeValueAsString(it));
} catch (JsonProcessingException e) { } catch (JsonProcessingException e) {
} }
} });
); return result;
return result; }
}
if (jresult instanceof LinkedHashMap) { if (jresult instanceof LinkedHashMap) {
try { try {
result.add(new ObjectMapper().writeValueAsString(jresult)); result.add(new ObjectMapper().writeValueAsString(jresult));
} catch (JsonProcessingException e) { } catch (JsonProcessingException e) {
} }
return result; return result;
} }
if (jresult instanceof String) { if (jresult instanceof String) {
result.add((String) jresult); result.add((String) jresult);
} }
return result; return result;
} }
public static String getJPathString(final String jsonPath, final String json) {
try {
Object o = JsonPath.read(json, jsonPath);
if (o instanceof String)
return (String) o;
if (o instanceof JSONArray && ((JSONArray) o).size() > 0)
return (String) ((JSONArray) o).get(0);
return "";
} catch (Exception e) {
return "";
}
}
public static String getJPathString(final String jsonPath, final String json) { public static double[] getJPathArray(final String jsonPath, final String json) {
try { try {
Object o = JsonPath.read(json, jsonPath); Object o = JsonPath.read(json, jsonPath);
if (o instanceof String) if (o instanceof double[])
return (String)o; return (double[]) o;
if (o instanceof JSONArray && ((JSONArray)o).size()>0) if (o instanceof JSONArray) {
return (String)((JSONArray)o).get(0); Object[] objects = ((JSONArray) o).toArray();
return ""; double[] array = new double[objects.length];
} catch (Exception e) { for (int i = 0; i < objects.length; i++) {
return ""; if (objects[i] instanceof BigDecimal)
} array[i] = ((BigDecimal) objects[i]).doubleValue();
} else
array[i] = (double) objects[i];
}
return array;
}
return new double[0];
} catch (Exception e) {
e.printStackTrace();
return new double[0];
}
}
public static double[] getJPathArray(final String jsonPath, final String json) { public static String truncateValue(String value, int length) {
try { if (value == null)
Object o = JsonPath.read(json, jsonPath); return "";
if (o instanceof double[])
return (double[]) o;
if (o instanceof JSONArray) {
Object[] objects = ((JSONArray) o).toArray();
double[] array = new double[objects.length];
for (int i = 0; i < objects.length; i++) {
if (objects[i] instanceof BigDecimal)
array[i] = ((BigDecimal)objects[i]).doubleValue();
else
array[i] = (double) objects[i];
}
return array;
}
return new double[0];
}
catch (Exception e) {
e.printStackTrace();
return new double[0];
}
}
if (length == -1 || length > value.length())
return value;
public static String truncateValue(String value, int length) { return value.substring(0, length);
if (value == null) }
return "";
if (length == -1 || length > value.length()) public static List<String> truncateList(List<String> list, int size) {
return value; if (size == -1 || size > list.size())
return list;
return value.substring(0, length); return list.subList(0, size);
} }
public static List<String> truncateList(List<String> list, int size) {
if (size == -1 || size > list.size())
return list;
return list.subList(0, size);
}
} }

View File

@ -1,13 +1,14 @@
package eu.dnetlib.pace.util; package eu.dnetlib.pace.util;
/**
 * Unchecked exception used by the pace library to signal configuration and
 * resolution failures.
 */
public class PaceException extends RuntimeException {

	/** Creates an exception with a message and the underlying cause. */
	public PaceException(String s, Throwable e) {
		super(s, e);
	}

	/** Creates an exception carrying only a message. */
	public PaceException(String s) {
		super(s);
	}
}

View File

@ -1,49 +1,61 @@
package eu.dnetlib.pace.util;
import eu.dnetlib.pace.clustering.ClusteringClass; package eu.dnetlib.pace.util;
import eu.dnetlib.pace.clustering.ClusteringFunction;
import eu.dnetlib.pace.tree.support.Comparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
import org.reflections.Reflections;
import java.io.Serializable; import java.io.Serializable;
import java.lang.reflect.InvocationTargetException; import java.lang.reflect.InvocationTargetException;
import java.util.Map; import java.util.Map;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.reflections.Reflections;
import eu.dnetlib.pace.clustering.ClusteringClass;
import eu.dnetlib.pace.clustering.ClusteringFunction;
import eu.dnetlib.pace.tree.support.Comparator;
import eu.dnetlib.pace.tree.support.ComparatorClass;
public class PaceResolver implements Serializable { public class PaceResolver implements Serializable {
public static final Reflections CLUSTERING_RESOLVER = new Reflections("eu.dnetlib.pace.clustering"); public static final Reflections CLUSTERING_RESOLVER = new Reflections("eu.dnetlib.pace.clustering");
public static final Reflections COMPARATOR_RESOLVER = new Reflections("eu.dnetlib.pace.tree"); public static final Reflections COMPARATOR_RESOLVER = new Reflections("eu.dnetlib.pace.tree");
private final Map<String, Class<ClusteringFunction>> clusteringFunctions; private final Map<String, Class<ClusteringFunction>> clusteringFunctions;
private final Map<String, Class<Comparator>> comparators; private final Map<String, Class<Comparator>> comparators;
public PaceResolver() { public PaceResolver() {
this.clusteringFunctions = CLUSTERING_RESOLVER.getTypesAnnotatedWith(ClusteringClass.class).stream() this.clusteringFunctions = CLUSTERING_RESOLVER
.filter(ClusteringFunction.class::isAssignableFrom) .getTypesAnnotatedWith(ClusteringClass.class)
.collect(Collectors.toMap(cl -> cl.getAnnotation(ClusteringClass.class).value(), cl -> (Class<ClusteringFunction>)cl)); .stream()
.filter(ClusteringFunction.class::isAssignableFrom)
.collect(
Collectors
.toMap(
cl -> cl.getAnnotation(ClusteringClass.class).value(), cl -> (Class<ClusteringFunction>) cl));
this.comparators = COMPARATOR_RESOLVER.getTypesAnnotatedWith(ComparatorClass.class).stream() this.comparators = COMPARATOR_RESOLVER
.filter(Comparator.class::isAssignableFrom) .getTypesAnnotatedWith(ComparatorClass.class)
.collect(Collectors.toMap(cl -> cl.getAnnotation(ComparatorClass.class).value(), cl -> (Class<Comparator>)cl)); .stream()
} .filter(Comparator.class::isAssignableFrom)
.collect(
Collectors.toMap(cl -> cl.getAnnotation(ComparatorClass.class).value(), cl -> (Class<Comparator>) cl));
}
public ClusteringFunction getClusteringFunction(String name, Map<String, Integer> params) throws PaceException { public ClusteringFunction getClusteringFunction(String name, Map<String, Integer> params) throws PaceException {
try { try {
return clusteringFunctions.get(name).getDeclaredConstructor(Map.class).newInstance(params); return clusteringFunctions.get(name).getDeclaredConstructor(Map.class).newInstance(params);
} catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { } catch (InstantiationException | IllegalAccessException | InvocationTargetException
throw new PaceException(name + " not found ", e); | NoSuchMethodException e) {
} throw new PaceException(name + " not found ", e);
} }
}
public Comparator getComparator(String name, Map<String, String> params) throws PaceException { public Comparator getComparator(String name, Map<String, String> params) throws PaceException {
try { try {
return comparators.get(name).getDeclaredConstructor(Map.class).newInstance(params); return comparators.get(name).getDeclaredConstructor(Map.class).newInstance(params);
} catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException | NullPointerException e) { } catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException
throw new PaceException(name + " not found ", e); | NullPointerException e) {
} throw new PaceException(name + " not found ", e);
} }
}
} }

View File

@ -1,11 +1,11 @@
package eu.dnetlib.pace.util;
package eu.dnetlib.pace.util;
import java.io.Serializable; import java.io.Serializable;
/**
 * Sink for the deduplication workflow's diagnostics and output relations.
 * Implementations must be serializable so they can travel with the job.
 */
public interface Reporter extends Serializable {

	/** Adds {@code delta} to the named counter within the given counter group. */
	void incrementCounter(String counterGroup, String counterName, long delta);

	/** Emits a relation of the given type between {@code from} and {@code to}. */
	void emit(String type, String from, String to);
}

View File

@ -1,11 +1,5 @@
package eu.dnetlib.pace;
import eu.dnetlib.pace.common.AbstractPaceFunctions; package eu.dnetlib.pace;
import eu.dnetlib.pace.config.Type;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldListImpl;
import eu.dnetlib.pace.model.FieldValueImpl;
import org.apache.commons.io.IOUtils;
import java.io.IOException; import java.io.IOException;
import java.io.StringWriter; import java.io.StringWriter;
@ -13,6 +7,14 @@ import java.nio.charset.StandardCharsets;
import java.util.List; import java.util.List;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.Type;
import eu.dnetlib.pace.model.Field;
import eu.dnetlib.pace.model.FieldListImpl;
import eu.dnetlib.pace.model.FieldValueImpl;
public abstract class AbstractPaceTest extends AbstractPaceFunctions { public abstract class AbstractPaceTest extends AbstractPaceFunctions {
protected String readFromClasspath(final String filename) { protected String readFromClasspath(final String filename) {
@ -41,9 +43,12 @@ public abstract class AbstractPaceTest extends AbstractPaceFunctions {
return new FieldValueImpl(Type.DoubleArray, "array", a); return new FieldValueImpl(Type.DoubleArray, "array", a);
} }
protected Field createFieldList(List<String> strings, String fieldName){ protected Field createFieldList(List<String> strings, String fieldName) {
List<FieldValueImpl> fieldValueStream = strings.stream().map(s -> new FieldValueImpl(Type.String, fieldName, s)).collect(Collectors.toList()); List<FieldValueImpl> fieldValueStream = strings
.stream()
.map(s -> new FieldValueImpl(Type.String, fieldName, s))
.collect(Collectors.toList());
FieldListImpl a = new FieldListImpl(); FieldListImpl a = new FieldListImpl();
a.addAll(fieldValueStream); a.addAll(fieldValueStream);

View File

@ -1,17 +1,20 @@
package eu.dnetlib.pace.clustering;
import com.google.common.collect.Lists; package eu.dnetlib.pace.clustering;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.AbstractPaceTest;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.DedupConfig;
import org.junit.jupiter.api.*;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.junit.jupiter.api.*;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import eu.dnetlib.pace.AbstractPaceTest;
import eu.dnetlib.pace.common.AbstractPaceFunctions;
import eu.dnetlib.pace.config.DedupConfig;
public class ClusteringFunctionTest extends AbstractPaceTest { public class ClusteringFunctionTest extends AbstractPaceTest {
private static Map<String, Integer> params; private static Map<String, Integer> params;
@ -20,7 +23,11 @@ public class ClusteringFunctionTest extends AbstractPaceTest {
@BeforeAll @BeforeAll
public static void setUp() throws Exception { public static void setUp() throws Exception {
params = Maps.newHashMap(); params = Maps.newHashMap();
conf = DedupConfig.load(AbstractPaceFunctions.readFromClasspath("/eu/dnetlib/pace/config/organization.current.conf.json", ClusteringFunctionTest.class)); conf = DedupConfig
.load(
AbstractPaceFunctions
.readFromClasspath(
"/eu/dnetlib/pace/config/organization.current.conf.json", ClusteringFunctionTest.class));
} }
@Test @Test
@ -210,7 +217,7 @@ public class ClusteringFunctionTest extends AbstractPaceTest {
} }
@Test @Test
public void testPersonClustering(){ public void testPersonClustering() {
final ClusteringFunction cf = new PersonClustering(params); final ClusteringFunction cf = new PersonClustering(params);
final String s = "Abd-Alla, Abo-el-nour N."; final String s = "Abd-Alla, Abo-el-nour N.";
@ -224,7 +231,7 @@ public class ClusteringFunctionTest extends AbstractPaceTest {
} }
@Test @Test
public void testPersonHash(){ public void testPersonHash() {
final ClusteringFunction cf = new PersonHash(params); final ClusteringFunction cf = new PersonHash(params);
final String s = "Manghi, Paolo"; final String s = "Manghi, Paolo";
@ -238,7 +245,7 @@ public class ClusteringFunctionTest extends AbstractPaceTest {
} }
@Test @Test
public void testLastNameFirstInitial(){ public void testLastNameFirstInitial() {
final ClusteringFunction cf = new LastNameFirstInitial(params); final ClusteringFunction cf = new LastNameFirstInitial(params);
final String s = "LI Yonghong"; final String s = "LI Yonghong";
@ -246,4 +253,4 @@ public class ClusteringFunctionTest extends AbstractPaceTest {
System.out.println(cf.apply(conf, Lists.newArrayList(title(s)))); System.out.println(cf.apply(conf, Lists.newArrayList(title(s))));
} }
} }

Some files were not shown because too many files have changed in this diff. Show More